From f0db9bca50d96c6eb43d548d004fa5528671fc7c Mon Sep 17 00:00:00 2001 From: Aleksandr Boldyrev Date: Wed, 22 Jan 2025 15:42:28 +0100 Subject: [PATCH 01/13] Add transformations support --- .../core/model/transformation_project.go | 76 +++ .../core/schema/transformation_project.go | 135 ++++++ .../resources/transformation_project.go | 260 +++++++++++ .../resources/transformation_project_test.go | 435 ++++++++++++++++++ 4 files changed, 906 insertions(+) create mode 100644 fivetran/framework/core/model/transformation_project.go create mode 100644 fivetran/framework/core/schema/transformation_project.go create mode 100644 fivetran/framework/resources/transformation_project.go create mode 100644 fivetran/framework/resources/transformation_project_test.go diff --git a/fivetran/framework/core/model/transformation_project.go b/fivetran/framework/core/model/transformation_project.go new file mode 100644 index 00000000..5ced1e05 --- /dev/null +++ b/fivetran/framework/core/model/transformation_project.go @@ -0,0 +1,76 @@ +package model + +import ( + "context" + + "github.com/fivetran/go-fivetran/transformations" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +type TransformationProject struct { + Id types.String `tfsdk:"id"` + GroupId types.String `tfsdk:"group_id"` + Type types.String `tfsdk:"type"` + Status types.String `tfsdk:"status"` + CreatedAt types.String `tfsdk:"created_at"` + CreatedById types.String `tfsdk:"created_by_id"` + Errors types.Set `tfsdk:"errors"` + RunTests types.Bool `tfsdk:"run_tests"` + ProjectConfig types.Object `tfsdk:"project_config"` +} + +func (d *TransformationProject) ReadFromResponse(ctx context.Context, resp transformations.TransformationProjectResponse) { + d.Id = types.StringValue(resp.Data.Id) + d.GroupId = types.StringValue(resp.Data.GroupId) + d.Type = types.StringValue(resp.Data.ProjectType) + d.CreatedAt = types.StringValue(resp.Data.CreatedAt) + d.CreatedById = types.StringValue(resp.Data.CreatedById) + d.Status = types.StringValue(resp.Data.Status) + + errors := []attr.Value{} + for _, el := range resp.Data.Errors { + errors = append(errors, types.StringValue(el)) + } + if len(errors) > 0 { + d.Errors = types.SetValueMust(types.StringType, errors) + } else { + if d.Errors.IsUnknown() { + d.Errors = types.SetNull(types.StringType) + } + } + + projectConfigTypes := map[string]attr.Type{ + "dbt_version": types.StringType, + "default_schema": types.StringType, + "git_remote_url": types.StringType, + "folder_path": types.StringType, + "git_branch": types.StringType, + "target_name": types.StringType, + "environment_vars": types.SetType{ElemType: types.StringType}, + "public_key": types.StringType, + "threads": types.Int64Type, + } + projectConfigItems := map[string]attr.Value{} + projectConfigItems["dbt_version"] = types.StringValue(resp.Data.ProjectConfig.DbtVersion) + projectConfigItems["default_schema"] = types.StringValue(resp.Data.ProjectConfig.DefaultSchema) + projectConfigItems["git_remote_url"] = types.StringValue(resp.Data.ProjectConfig.GitRemoteUrl) + projectConfigItems["folder_path"] = types.StringValue(resp.Data.ProjectConfig.FolderPath) + projectConfigItems["git_branch"] = types.StringValue(resp.Data.ProjectConfig.GitBranch) + projectConfigItems["target_name"] = types.StringValue(resp.Data.ProjectConfig.TargetName) + projectConfigItems["public_key"] = types.StringValue(resp.Data.ProjectConfig.PublicKey) + projectConfigItems["threads"] = 
types.Int64Value(int64(resp.Data.ProjectConfig.Threads))
+
+	envVars := []attr.Value{}
+	for _, el := range resp.Data.ProjectConfig.EnvironmentVars {
+		envVars = append(envVars, types.StringValue(el))
+	}
+	if len(envVars) > 0 {
+		projectConfigItems["environment_vars"] = types.SetValueMust(types.StringType, envVars)
+	} else {
+		projectConfigItems["environment_vars"] = types.SetNull(types.StringType)
+	}
+
+	d.ProjectConfig, _ = types.ObjectValue(projectConfigTypes, projectConfigItems)
+}
\ No newline at end of file
diff --git a/fivetran/framework/core/schema/transformation_project.go b/fivetran/framework/core/schema/transformation_project.go
new file mode 100644
index 00000000..cf8704e4
--- /dev/null
+++ b/fivetran/framework/core/schema/transformation_project.go
@@ -0,0 +1,135 @@
+package schema
+
+import (
+	"context"
+
+	"github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core"
+	datasourceSchema "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+	resourceSchema "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+)
+
+func TransformationProjectResource(ctx context.Context) resourceSchema.Schema {
+	return resourceSchema.Schema{
+		Attributes: transformationProjectSchema().GetResourceSchema(),
+		Blocks: map[string]resourceSchema.Block{
+			"project_config": resourceSchema.SingleNestedBlock{
+				Attributes: transformationProjectConfigSchema().GetResourceSchema(),
+			},
+		},
+	}
+}
+
+func TransformationProjectDatasource() datasourceSchema.Schema {
+	return datasourceSchema.Schema{
+		Attributes: transformationProjectSchema().GetDatasourceSchema(),
+		Blocks: map[string]datasourceSchema.Block{
+			"project_config": datasourceSchema.SingleNestedBlock{
+				Attributes: transformationProjectConfigSchema().GetDatasourceSchema(),
+			},
+		},
+	}
+}
+
+func TransformationProjectListDatasource() datasourceSchema.Schema {
+	return datasourceSchema.Schema{
+		Attributes: transformationProjectSchema().GetDatasourceSchema(),
+	}
+}
+
+func transformationProjectSchema() core.Schema {
+	return core.Schema{
+		Fields: map[string]core.SchemaField{
+			"id": {
+				IsId:        true,
+				ValueType:   core.String,
+				Description: "The unique identifier for the transformation project within the Fivetran system.",
+			},
+			"group_id": {
+				Required:    true,
+				ForceNew:    true,
+				ValueType:   core.String,
+				Description: "The unique identifier for the group within the Fivetran system.",
+			},
+			"type": {
+				Required:    true,
+				ForceNew:    true,
+				ValueType:   core.String,
+				Description: "Transformation project type.",
+			},
+			"status": {
+				ValueType:    core.String,
+				Readonly:     true,
+				ResourceOnly: true,
+				Description:  "Status of the transformation project (NOT_READY, READY, ERROR).",
+			},
+			"created_at": {
+				ValueType:   core.String,
+				Readonly:    true,
+				Description: "The timestamp of the transformation project creation.",
+			},
+			"created_by_id": {
+				ValueType:   core.String,
+				Readonly:    true,
+				Description: "The unique identifier for the User within the Fivetran system who created the transformation project.",
+			},
+			"errors": {
+				ValueType:   core.StringsSet,
+				Readonly:    true,
+				Description: "The list of error messages reported for the transformation project.",
+			},
+			"run_tests": {
+				ValueType:    core.Boolean,
+				ResourceOnly: true,
+				Description:  "Specifies whether the setup tests should be run automatically. 
The default value is TRUE.", + }, + }, + } +} + +func transformationProjectConfigSchema() core.Schema { + return core.Schema{ + Fields: map[string]core.SchemaField{ + "dbt_version": { + ValueType: core.String, + ForceNew: true, + Description: "The version of dbt that should run the project", + }, + "default_schema": { + ValueType: core.String, + ForceNew: true, + Description: "Default schema in destination. This production schema will contain your transformed data.", + }, + "git_remote_url": { + ValueType: core.String, + ForceNew: true, + Description: "Git remote URL with your dbt project", + }, + "folder_path": { + ValueType: core.String, + Description: "Folder in Git repo with your dbt project", + }, + "git_branch": { + ValueType: core.String, + Description: "Git branch", + }, + "threads": { + ValueType: core.Integer, + Description: "The number of threads dbt will use (from 1 to 32). Make sure this value is compatible with your destination type. For example, Snowflake supports only 8 concurrent queries on an X-Small warehouse.", + }, + "target_name": { + ValueType: core.String, + Description: "Target name to set or override the value from the deployment.yaml", + }, + "environment_vars": { + ValueType: core.StringsSet, + Description: "List of environment variables defined as key-value pairs in the raw string format using = as a separator. The variable name should have the DBT_ prefix and can contain A-Z, 0-9, dash, underscore, or dot characters. Example: \"DBT_VARIABLE=variable_value\"", + }, + "public_key": { + ValueType: core.String, + Readonly: true, + Description: "Public key to grant Fivetran SSH access to git repository.", + }, + + }, + } +} \ No newline at end of file diff --git a/fivetran/framework/resources/transformation_project.go b/fivetran/framework/resources/transformation_project.go new file mode 100644 index 00000000..fef974f9 --- /dev/null +++ b/fivetran/framework/resources/transformation_project.go @@ -0,0 +1,260 @@ +package resources + +import ( + "context" + "fmt" + + "github.com/fivetran/go-fivetran" + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core" + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/model" + fivetranSchema "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/schema" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +func TransformationProject() resource.Resource { + return &transformationProject{} +} + +type transformationProject struct { + core.ProviderResource +} + +// Ensure the implementation satisfies the desired interfaces. 
+var _ resource.ResourceWithConfigure = &transformationProject{}
+var _ resource.ResourceWithImportState = &transformationProject{}
+
+func (r *transformationProject) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+	resp.TypeName = "fivetran_transformation_project"
+}
+
+func (r *transformationProject) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+	resp.Schema = fivetranSchema.DbtProjectResource(ctx)
+}
+
+func (r *transformationProject) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+	resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp)
+}
+
+func (r *transformationProject) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+	if r.GetClient() == nil {
+		resp.Diagnostics.AddError(
+			"Unconfigured Fivetran Client",
+			"Please report this issue to the provider developers.",
+		)
+
+		return
+	}
+
+	var data model.TransformationProject
+	// Read Terraform plan data into the model
+	resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	client := r.GetClient()
+	svc := client.NewTransformationProjectCreate()
+
+	svc.GroupId(data.GroupId.ValueString())
+	svc.ProjectType(data.Type.ValueString())
+	svc.RunTests(data.RunTests.ValueBool())
+
+	if !data.ProjectConfig.IsNull() && !data.ProjectConfig.IsUnknown() {
+		projectConfig := fivetran.NewTransformationProjectConfig()
+		projectConfigAttributes := data.ProjectConfig.Attributes()
+		projectConfig.DbtVersion(projectConfigAttributes["dbt_version"].(basetypes.StringValue).ValueString())
+		projectConfig.DefaultSchema(projectConfigAttributes["default_schema"].(basetypes.StringValue).ValueString())
+		projectConfig.GitRemoteUrl(projectConfigAttributes["git_remote_url"].(basetypes.StringValue).ValueString())
+		projectConfig.FolderPath(projectConfigAttributes["folder_path"].(basetypes.StringValue).ValueString())
+		projectConfig.GitBranch(projectConfigAttributes["git_branch"].(basetypes.StringValue).ValueString())
+		projectConfig.TargetName(projectConfigAttributes["target_name"].(basetypes.StringValue).ValueString())
+		projectConfig.Threads(int(projectConfigAttributes["threads"].(basetypes.Int64Value).ValueInt64()))
+
+		if !projectConfigAttributes["environment_vars"].IsUnknown() && !projectConfigAttributes["environment_vars"].IsNull() {
+			evars := []string{}
+			for _, ev := range projectConfigAttributes["environment_vars"].(basetypes.SetValue).Elements() {
+				evars = append(evars, ev.(basetypes.StringValue).ValueString())
+			}
+			projectConfig.EnvironmentVars(evars)
+		}
+
+		svc.ProjectConfig(projectConfig)
+	}
+
+	projectResponse, err := svc.Do(ctx)
+	if err != nil {
+		if projectResponse.Code != "DbtProjectExists" {
+			resp.Diagnostics.AddError(
+				"Unable to Create Transformation Project Resource.",
+				fmt.Sprintf("%v; code: %v; message: %v", err, projectResponse.Code, projectResponse.Message),
+			)
+
+			return
+		} else {
+			// try to recover the project id
+			projectListResponse, err := r.GetClient().NewTransformationProjectsList().Do(ctx)
+
+			if err != nil {
+				resp.Diagnostics.AddError(
+					"Unable to Read Transformation Project Resource.",
+					fmt.Sprintf("%v; code: %v; message: %v", err, projectResponse.Code, projectResponse.Message),
+				)
+				return
+			}
+
+			for _, v := range projectListResponse.Data.Items {
+				if v.GroupId == data.GroupId.ValueString() {
+					// assign to the outer projectResponse (no `:=`) so the
+					// recovered project is the one read into state below
+					projectResponse, err = 
r.GetClient().NewTransformationProjectDetails().ProjectId(v.Id).Do(ctx) + + if err != nil { + resp.Diagnostics.AddError( + "Unable to Read Transformation Project Resource.", + fmt.Sprintf("%v; code: %v; message: %v", err, projectResponse.Code, projectResponse.Message), + ) + return + } + } + } + } + } + + data.ReadFromResponse(ctx, projectResponse) + + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + if resp.Diagnostics.HasError() { + // Do cleanup on error + deleteResponse, err := client.NewTransformationProjectDelete().ProjectId(projectResponse.Data.Id).Do(ctx) + if err != nil { + resp.Diagnostics.AddError( + "Unable to Cleanup Transformation Project Resource.", + fmt.Sprintf("%v; code: %v; message: %v", err, deleteResponse.Code, deleteResponse.Message), + ) + } + } +} + +func (r *transformationProject) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + if r.GetClient() == nil { + resp.Diagnostics.AddError( + "Unconfigured Fivetran Client", + "Please report this issue to the provider developers.", + ) + + return + } + + var data model.TransformationProject + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + + if resp.Diagnostics.HasError() { + return + } + + projectResponse, err := r.GetClient().NewTransformationProjectDetails().ProjectId(data.Id.ValueString()).Do(ctx) + + if err != nil { + resp.Diagnostics.AddError( + "Unable to Read Transformation Project Resource.", + fmt.Sprintf("%v; code: %v; message: %v", err, projectResponse.Code, projectResponse.Message), + ) + return + } + + data.ReadFromResponse(ctx, projectResponse) + + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +func (r *transformationProject) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + if r.GetClient() == nil { + resp.Diagnostics.AddError( + "Unconfigured Fivetran Client", + "Please report this issue to the provider developers.", + ) + + return + } + + var state model.TransformationProject + var plan model.TransformationProject + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) 
+ + if resp.Diagnostics.HasError() { + return + } + + svc := r.GetClient().NewTransformationProjectUpdate() + svc.ProjectId(state.Id.ValueString()) + svc.RunTests(plan.RunTests.ValueBool()) + + if !plan.ProjectConfig.IsUnknown() && !state.ProjectConfig.Equal(plan.ProjectConfig) { + projectConfig := fivetran.NewTransformationProjectConfig() + projectConfigAttributes := plan.ProjectConfig.Attributes() + projectConfig.FolderPath(projectConfigAttributes["folder_path"].(basetypes.StringValue).ValueString()) + projectConfig.GitBranch(projectConfigAttributes["git_branch"].(basetypes.StringValue).ValueString()) + projectConfig.TargetName(projectConfigAttributes["target_name"].(basetypes.StringValue).ValueString()) + projectConfig.Threads(int(projectConfigAttributes["threads"].(basetypes.Int64Value).ValueInt64())) + + if !projectConfigAttributes["environment_vars"].IsUnknown() && !projectConfigAttributes["environment_vars"].IsNull() { + evars := []string{} + for _, ev := range projectConfigAttributes["environment_vars"].(basetypes.SetValue).Elements() { + evars = append(evars, ev.(basetypes.StringValue).ValueString()) + } + projectConfig.EnvironmentVars(evars) + } + svc.ProjectConfig(projectConfig) + } + + projectResponse, err := svc.Do(ctx) + + if err != nil { + resp.Diagnostics.AddError( + "Unable to Update Transformation Project Resource.", + fmt.Sprintf("%v; code: %v; message: %v", err, projectResponse.Code, projectResponse.Message), + ) + return + } + + plan.ReadFromResponse(ctx, projectResponse) + + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) +} + +func (r *transformationProject) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + if r.GetClient() == nil { + resp.Diagnostics.AddError( + "Unconfigured Fivetran Client", + "Please report this issue to the provider developers.", + ) + + return + } + + var data model.TransformationProject + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) 
+ + if resp.Diagnostics.HasError() { + return + } + + deleteResponse, err := r.GetClient().NewTransformationProjectDelete().ProjectId(data.Id.ValueString()).Do(ctx) + + if err != nil { + resp.Diagnostics.AddError( + "Unable to Delete transformation Project Resource.", + fmt.Sprintf("%v; code: %v; message: %v", err, deleteResponse.Code, deleteResponse.Message), + ) + return + } +} diff --git a/fivetran/framework/resources/transformation_project_test.go b/fivetran/framework/resources/transformation_project_test.go new file mode 100644 index 00000000..a32d2315 --- /dev/null +++ b/fivetran/framework/resources/transformation_project_test.go @@ -0,0 +1,435 @@ +package resources_test + +import ( + "net/http" + "testing" + + "github.com/fivetran/go-fivetran/tests/mock" + tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" +) + +var ( + transformationProjectResourceMockGetHandler *mock.Handler + transformationProjectResourceMockPostHandler *mock.Handler + transformationProjectResourceMockPatchHandler *mock.Handler + transformationProjectResourceMockDeleteHandler *mock.Handler + + transformationProjectResourceMockData map[string]interface{} + + transformationProjectResourceCreateMockGetHandler *mock.Handler + transformationProjectResourceCreateMockGetModelsHandler *mock.Handler + transformationProjectResourceCreateMockPostHandler *mock.Handler + transformationProjectResourceCreateMockDeleteHandler *mock.Handler + transformationModelsDataSourceMockGetHandler *mock.Handler + + transformationModelsDataSourceMockData map[string]interface{} + transformationProjectResourceCreateMockData map[string]interface{} +) + +func setupMockClientTransformationProjectResourceMappingTest(t *testing.T) { + transformationProjectResponse := ` +{ + "id": "string", + "type": "DBT_GIT", + "status": "NOT_READY", + "errors": [ + "string" + ], + "created_at": "2019-08-24T14:15:22Z", + "group_id": "string", + "setup_tests": [ + { + "title": "Test Title", + "status": "FAILED", + "message": "Error message", + "details": "Error details" + } + ], + "created_by_id": "string", + "project_config": { + "dbt_version": "string", + "default_schema": "string", + "git_remote_url": "string", + "folder_path": "string", + "git_branch": "string", + "threads": 0, + "target_name": "string", + "environment_vars": [ + "string" + ], + "public_key": "string" + } + }` + tfmock.MockClient().Reset() + + transformationProjectResourceMockGetHandler = tfmock.MockClient().When(http.MethodGet, "/v1/transformation-projects/project_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", transformationProjectResourceMockData), nil + }, + ) + + transformationProjectResourceMockPatchHandler = tfmock.MockClient().When(http.MethodPatch, "/v1/transformation-projects/project_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + body := tfmock.RequestBodyToJson(t, req) + + tfmock.AssertKeyExistsAndHasValue(t, body, "transformation_version", "transformation_version_1") + tfmock.AssertKeyExistsAndHasValue(t, body, "target_name", "target_name_1") + + varsFromRequest := body["environment_vars"].([]interface{}) + tfmock.AssertEqual(t, len(varsFromRequest), 1) + tfmock.AssertEqual(t, varsFromRequest[0], "environment_var_1") + + tfmock.AssertKeyExistsAndHasValue(t, body, "threads", float64(2)) + + tfmock.AssertKeyExists(t, body, 
"project_config") + + config := body["project_config"].(map[string]interface{}) + + tfmock.AssertKeyExistsAndHasValue(t, config, "git_branch", "git_branch_1") + tfmock.AssertKeyExistsAndHasValue(t, config, "folder_path", "folder_path_1") + + for k, v := range body { + if k != "project_config" { + transformationProjectResourceMockData[k] = v + } else { + projectConfig := transformationProjectResourceMockData[k].(map[string]interface{}) + for ck, cv := range config { + projectConfig[ck] = cv + } + } + } + + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", transformationProjectResourceMockData), nil + }, + ) + + transformationProjectResourceMockPostHandler = tfmock.MockClient().When(http.MethodPost, "/v1/transformation-projects").ThenCall( + func(req *http.Request) (*http.Response, error) { + body := tfmock.RequestBodyToJson(t, req) + + tfmock.AssertKeyExistsAndHasValue(t, body, "group_id", "group_id") + tfmock.AssertKeyExistsAndHasValue(t, body, "transformation_version", "transformation_version") + tfmock.AssertKeyExistsAndHasValue(t, body, "default_schema", "default_schema") + tfmock.AssertKeyExistsAndHasValue(t, body, "target_name", "target_name") + varsFromRequest := body["environment_vars"].([]interface{}) + tfmock.AssertEqual(t, len(varsFromRequest), 1) + tfmock.AssertEqual(t, varsFromRequest[0], "environment_var") + + tfmock.AssertKeyExistsAndHasValue(t, body, "threads", float64(1)) + tfmock.AssertKeyExistsAndHasValue(t, body, "type", "GIT") + + tfmock.AssertKeyExists(t, body, "project_config") + + config := body["project_config"].(map[string]interface{}) + + tfmock.AssertKeyExistsAndHasValue(t, config, "git_remote_url", "git_remote_url") + tfmock.AssertKeyExistsAndHasValue(t, config, "git_branch", "git_branch") + tfmock.AssertKeyExistsAndHasValue(t, config, "folder_path", "folder_path") + + transformationProjectResourceMockData = tfmock.CreateMapFromJsonString(t, transformationProjectResponse) + return tfmock.FivetranSuccessResponse(t, req, http.StatusCreated, "Success", transformationProjectResourceMockData), nil + }, + ) + + transformationProjectResourceMockDeleteHandler = tfmock.MockClient().When(http.MethodDelete, + "/v1/transformation-projects/project_id", + ).ThenCall( + func(req *http.Request) (*http.Response, error) { + transformationProjectResourceMockData = nil + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", nil), nil + }, + ) +} + +func TestResourceTransformationProjectMappingMock(t *testing.T) { + step1 := resource.TestStep{ + Config: ` + resource "fivetran_transformation_project" "project" { + provider = fivetran-provider + group_id = "group_id" + type = "DBT_GIT" + run_tests = true + + project_config { + git_remote_url = "git_remote_url" + git_branch = "git_branch_1" + folder_path = "folder_path_1" + dbt_version = "string" + default_schema = "string" + threads = 0 + target_name = "string" + } + }`, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationProjectResourceMockPostHandler.Interactions, 1) + tfmock.AssertEqual(t, transformationProjectResourceMockGetHandler.Interactions, 0) + tfmock.AssertNotEmpty(t, transformationProjectResourceMockData) + return nil + }, + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "id", "project_id"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "group_id", "group_id"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", 
"transformation_version", "transformation_version"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_at", "created_at"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_by_id", "created_by_id"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "public_key", "public_key"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "default_schema", "default_schema"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "target_name", "target_name"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "environment_vars.0", "environment_var"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "threads", "1"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "type", "GIT"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_remote_url", "git_remote_url"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_branch", "git_branch"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.folder_path", "folder_path"), + ), + } + + step2 := resource.TestStep{ + Config: ` + resource "fivetran_transformation_project" "project" { + provider = fivetran-provider + group_id = "group_id" + type = "DBT_GIT" + run_tests = true + + project_config { + git_remote_url = "git_remote_url" + git_branch = "git_branch_1" + folder_path = "folder_path_1" + dbt_version = "string" + default_schema = "string" + threads = 1 + target_name = "string" + } + }`, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationProjectResourceMockPatchHandler.Interactions, 1) + tfmock.AssertEqual(t, transformationProjectResourceMockGetHandler.Interactions, 2) + tfmock.AssertNotEmpty(t, transformationProjectResourceMockData) + return nil + }, + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "id", "project_id"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "group_id", "group_id"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "transformation_version", "transformation_version_1"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_at", "created_at"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_by_id", "created_by_id"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "public_key", "public_key"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "default_schema", "default_schema"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "target_name", "target_name_1"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "environment_vars.0", "environment_var_1"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "threads", "2"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "type", "GIT"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_remote_url", "git_remote_url"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_branch", "git_branch_1"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", 
"project_config.folder_path", "folder_path_1"), + ), + } + + resource.Test( + t, + resource.TestCase{ + PreCheck: func() { + setupMockClientTransformationProjectResourceMappingTest(t) + }, + ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, + CheckDestroy: func(s *terraform.State) error { + return nil + }, + Steps: []resource.TestStep{ + step1, + step2, + }, + }, + ) +} + +func setupMockClientTransformationProjectResourceCreateTest(t *testing.T) { + + transformationProjectResponse := ` +{ + "id": "string", + "type": "DBT_GIT", + "status": "NOT_READY", + "errors": [ + "string" + ], + "created_at": "2019-08-24T14:15:22Z", + "group_id": "string", + "setup_tests": [ + { + "title": "Test Title", + "status": "FAILED", + "message": "Error message", + "details": "Error details" + } + ], + "created_by_id": "string", + "project_config": { + "dbt_version": "string", + "default_schema": "string", + "git_remote_url": "string", + "folder_path": "string", + "git_branch": "string", + "threads": 0, + "target_name": "string", + "environment_vars": [ + "string" + ], + "public_key": "string" + } + }` + + transformationProjectResponseReady := ` +{ + "id": "string", + "type": "DBT_GIT", + "status": "NOT_READY", + "errors": [ + "string" + ], + "created_at": "2019-08-24T14:15:22Z", + "group_id": "string", + "setup_tests": [ + { + "title": "Test Title", + "status": "FAILED", + "message": "Error message", + "details": "Error details" + } + ], + "created_by_id": "string", + "project_config": { + "dbt_version": "string", + "default_schema": "string", + "git_remote_url": "string", + "folder_path": "string", + "git_branch": "string", + "threads": 0, + "target_name": "string", + "environment_vars": [ + "string" + ], + "public_key": "string" + } + }` + tfmock.MockClient().Reset() + + getIteration := 0 + + transformationProjectResourceCreateMockGetHandler = tfmock.MockClient().When(http.MethodGet, "/v1/transformation-projects/project_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + getIteration = getIteration + 1 + if getIteration == 1 { + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", tfmock.CreateMapFromJsonString(t, transformationProjectResponse)), nil + } else { + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", tfmock.CreateMapFromJsonString(t, transformationProjectResponseReady)), nil + } + + }, + ) + + transformationProjectResourceCreateMockPostHandler = tfmock.MockClient().When(http.MethodPost, "/v1/transformation-projects").ThenCall( + func(req *http.Request) (*http.Response, error) { + body := tfmock.RequestBodyToJson(t, req) + + tfmock.AssertKeyExistsAndHasValue(t, body, "group_id", "group_id") + tfmock.AssertKeyExistsAndHasValue(t, body, "transformation_version", "transformation_version") + tfmock.AssertKeyExistsAndHasValue(t, body, "default_schema", "default_schema") + tfmock.AssertKeyExistsAndHasValue(t, body, "target_name", "target_name") + varsFromRequest := body["environment_vars"].([]interface{}) + tfmock.AssertEqual(t, len(varsFromRequest), 1) + tfmock.AssertEqual(t, varsFromRequest[0], "environment_var") + + tfmock.AssertKeyExistsAndHasValue(t, body, "threads", float64(1)) + tfmock.AssertKeyExistsAndHasValue(t, body, "type", "GIT") + + tfmock.AssertKeyExists(t, body, "project_config") + + config := body["project_config"].(map[string]interface{}) + + tfmock.AssertKeyExistsAndHasValue(t, config, "git_remote_url", "git_remote_url") + tfmock.AssertKeyExistsAndHasValue(t, config, "git_branch", "git_branch") + 
tfmock.AssertKeyExistsAndHasValue(t, config, "folder_path", "folder_path") + + transformationProjectResourceCreateMockData = tfmock.CreateMapFromJsonString(t, transformationProjectResponse) + return tfmock.FivetranSuccessResponse(t, req, http.StatusCreated, "Success", transformationProjectResourceCreateMockData), nil + }, + ) + + transformationProjectResourceCreateMockDeleteHandler = tfmock.MockClient().When(http.MethodDelete, + "/v1/transformation-projects/project_id", + ).ThenCall( + func(req *http.Request) (*http.Response, error) { + transformationProjectResourceCreateMockData = nil + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", nil), nil + }, + ) +} + +func TestResourceTransformationProjectCreateMock(t *testing.T) { + step1 := resource.TestStep{ + Config: ` + resource "fivetran_transformation_project" "project" { + provider = fivetran-provider + group_id = "group_id" + type = "DBT_GIT" + run_tests = true + + project_config { + git_remote_url = "git_remote_url" + git_branch = "git_branch_1" + folder_path = "folder_path_1" + dbt_version = "string" + default_schema = "string" + threads = 0 + target_name = "string" + } + }`, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationProjectResourceCreateMockPostHandler.Interactions, 1) + tfmock.AssertEqual(t, transformationProjectResourceCreateMockGetModelsHandler.Interactions, 0) + tfmock.AssertNotEmpty(t, transformationProjectResourceCreateMockData) + return nil + }, + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "id", "project_id"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "group_id", "group_id"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "transformation_version", "transformation_version"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_at", "created_at"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_by_id", "created_by_id"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "public_key", "public_key"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "default_schema", "default_schema"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "target_name", "target_name"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "environment_vars.0", "environment_var"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "threads", "1"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "type", "GIT"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_remote_url", "git_remote_url"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_branch", "git_branch"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.folder_path", "folder_path"), + ), + } + + resource.Test( + t, + resource.TestCase{ + PreCheck: func() { + setupMockClientTransformationProjectResourceCreateTest(t) + }, + ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, + CheckDestroy: func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationProjectResourceCreateMockDeleteHandler.Interactions, 1) + tfmock.AssertEmpty(t, transformationProjectResourceCreateMockData) + return nil + }, + Steps: []resource.TestStep{ + step1, + }, + 
}, + ) +} From b7c1d4e2bf28ca50c100854c4733927e25343475 Mon Sep 17 00:00:00 2001 From: Aleksandr Boldyrev Date: Wed, 22 Jan 2025 16:21:56 +0100 Subject: [PATCH 02/13] transformation project --- fivetran/framework/provider.go | 1 + .../resources/transformation_project.go | 2 +- .../resources/transformation_project_test.go | 341 ++---------------- 3 files changed, 32 insertions(+), 312 deletions(-) diff --git a/fivetran/framework/provider.go b/fivetran/framework/provider.go index 9071dba7..77177c49 100644 --- a/fivetran/framework/provider.go +++ b/fivetran/framework/provider.go @@ -123,6 +123,7 @@ func (p *fivetranProvider) Resources(ctx context.Context) []func() resource.Reso resources.HybridDeploymentAgent, resources.DbtGitProjectConfig, resources.PrivateLink, + resources.TransformationProject, } } diff --git a/fivetran/framework/resources/transformation_project.go b/fivetran/framework/resources/transformation_project.go index fef974f9..9fa5183f 100644 --- a/fivetran/framework/resources/transformation_project.go +++ b/fivetran/framework/resources/transformation_project.go @@ -30,7 +30,7 @@ func (r *transformationProject) Metadata(ctx context.Context, req resource.Metad } func (r *transformationProject) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = fivetranSchema.DbtProjectResource(ctx) + resp.Schema = fivetranSchema.TransformationProjectResource(ctx) } func (r *transformationProject) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { diff --git a/fivetran/framework/resources/transformation_project_test.go b/fivetran/framework/resources/transformation_project_test.go index a32d2315..467c0b73 100644 --- a/fivetran/framework/resources/transformation_project_test.go +++ b/fivetran/framework/resources/transformation_project_test.go @@ -3,6 +3,7 @@ package resources_test import ( "net/http" "testing" + "fmt" "github.com/fivetran/go-fivetran/tests/mock" tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock" @@ -13,32 +14,22 @@ import ( var ( transformationProjectResourceMockGetHandler *mock.Handler transformationProjectResourceMockPostHandler *mock.Handler - transformationProjectResourceMockPatchHandler *mock.Handler transformationProjectResourceMockDeleteHandler *mock.Handler transformationProjectResourceMockData map[string]interface{} - - transformationProjectResourceCreateMockGetHandler *mock.Handler - transformationProjectResourceCreateMockGetModelsHandler *mock.Handler - transformationProjectResourceCreateMockPostHandler *mock.Handler - transformationProjectResourceCreateMockDeleteHandler *mock.Handler - transformationModelsDataSourceMockGetHandler *mock.Handler - - transformationModelsDataSourceMockData map[string]interface{} - transformationProjectResourceCreateMockData map[string]interface{} ) func setupMockClientTransformationProjectResourceMappingTest(t *testing.T) { transformationProjectResponse := ` { - "id": "string", + "id": "project_id", "type": "DBT_GIT", "status": "NOT_READY", "errors": [ "string" ], - "created_at": "2019-08-24T14:15:22Z", - "group_id": "string", + "created_at": "created_at", + "group_id": "group_id", "setup_tests": [ { "title": "Test Title", @@ -47,19 +38,19 @@ func setupMockClientTransformationProjectResourceMappingTest(t *testing.T) { "details": "Error details" } ], - "created_by_id": "string", + "created_by_id": "created_by_id", "project_config": { - "dbt_version": "string", - "default_schema": "string", - "git_remote_url": 
"string", - "folder_path": "string", - "git_branch": "string", + "dbt_version": "dbt_version", + "default_schema": "default_schema", + "git_remote_url": "git_remote_url", + "folder_path": "folder_path", + "git_branch": "git_branch", "threads": 0, - "target_name": "string", + "target_name": "target_name", "environment_vars": [ - "string" + "environment_var" ], - "public_key": "string" + "public_key": "public_key" } }` tfmock.MockClient().Reset() @@ -70,63 +61,21 @@ func setupMockClientTransformationProjectResourceMappingTest(t *testing.T) { }, ) - transformationProjectResourceMockPatchHandler = tfmock.MockClient().When(http.MethodPatch, "/v1/transformation-projects/project_id").ThenCall( - func(req *http.Request) (*http.Response, error) { - body := tfmock.RequestBodyToJson(t, req) - - tfmock.AssertKeyExistsAndHasValue(t, body, "transformation_version", "transformation_version_1") - tfmock.AssertKeyExistsAndHasValue(t, body, "target_name", "target_name_1") - - varsFromRequest := body["environment_vars"].([]interface{}) - tfmock.AssertEqual(t, len(varsFromRequest), 1) - tfmock.AssertEqual(t, varsFromRequest[0], "environment_var_1") - - tfmock.AssertKeyExistsAndHasValue(t, body, "threads", float64(2)) - - tfmock.AssertKeyExists(t, body, "project_config") - - config := body["project_config"].(map[string]interface{}) - - tfmock.AssertKeyExistsAndHasValue(t, config, "git_branch", "git_branch_1") - tfmock.AssertKeyExistsAndHasValue(t, config, "folder_path", "folder_path_1") - - for k, v := range body { - if k != "project_config" { - transformationProjectResourceMockData[k] = v - } else { - projectConfig := transformationProjectResourceMockData[k].(map[string]interface{}) - for ck, cv := range config { - projectConfig[ck] = cv - } - } - } - - return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", transformationProjectResourceMockData), nil - }, - ) - transformationProjectResourceMockPostHandler = tfmock.MockClient().When(http.MethodPost, "/v1/transformation-projects").ThenCall( func(req *http.Request) (*http.Response, error) { body := tfmock.RequestBodyToJson(t, req) - tfmock.AssertKeyExistsAndHasValue(t, body, "group_id", "group_id") - tfmock.AssertKeyExistsAndHasValue(t, body, "transformation_version", "transformation_version") - tfmock.AssertKeyExistsAndHasValue(t, body, "default_schema", "default_schema") - tfmock.AssertKeyExistsAndHasValue(t, body, "target_name", "target_name") - varsFromRequest := body["environment_vars"].([]interface{}) - tfmock.AssertEqual(t, len(varsFromRequest), 1) - tfmock.AssertEqual(t, varsFromRequest[0], "environment_var") - - tfmock.AssertKeyExistsAndHasValue(t, body, "threads", float64(1)) - tfmock.AssertKeyExistsAndHasValue(t, body, "type", "GIT") + tfmock.AssertKeyExistsAndHasValue(t, body, "type", "DBT_GIT") tfmock.AssertKeyExists(t, body, "project_config") - config := body["project_config"].(map[string]interface{}) - tfmock.AssertKeyExistsAndHasValue(t, config, "git_remote_url", "git_remote_url") tfmock.AssertKeyExistsAndHasValue(t, config, "git_branch", "git_branch") tfmock.AssertKeyExistsAndHasValue(t, config, "folder_path", "folder_path") + tfmock.AssertKeyExistsAndHasValue(t, config, "dbt_version", "dbt_version") + tfmock.AssertKeyExistsAndHasValue(t, config, "default_schema", "default_schema") + tfmock.AssertKeyExistsAndHasValue(t, config, "target_name", "target_name") + transformationProjectResourceMockData = tfmock.CreateMapFromJsonString(t, transformationProjectResponse) return tfmock.FivetranSuccessResponse(t, req, http.StatusCreated, 
"Success", transformationProjectResourceMockData), nil @@ -154,12 +103,13 @@ func TestResourceTransformationProjectMappingMock(t *testing.T) { project_config { git_remote_url = "git_remote_url" - git_branch = "git_branch_1" - folder_path = "folder_path_1" - dbt_version = "string" - default_schema = "string" + git_branch = "git_branch" + folder_path = "folder_path" + dbt_version = "dbt_version" + default_schema = "default_schema" threads = 0 - target_name = "string" + target_name = "target_name" + environment_vars = ["environment_var"] } }`, @@ -172,64 +122,20 @@ func TestResourceTransformationProjectMappingMock(t *testing.T) { }, resource.TestCheckResourceAttr("fivetran_transformation_project.project", "id", "project_id"), resource.TestCheckResourceAttr("fivetran_transformation_project.project", "group_id", "group_id"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "transformation_version", "transformation_version"), resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_at", "created_at"), resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_by_id", "created_by_id"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "public_key", "public_key"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "default_schema", "default_schema"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "target_name", "target_name"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "environment_vars.0", "environment_var"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "threads", "1"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "type", "GIT"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "type", "DBT_GIT"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.dbt_version", "dbt_version"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.public_key", "public_key"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.default_schema", "default_schema"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.target_name", "target_name"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.environment_vars.0", "environment_var"), resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_remote_url", "git_remote_url"), resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_branch", "git_branch"), resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.folder_path", "folder_path"), ), } - step2 := resource.TestStep{ - Config: ` - resource "fivetran_transformation_project" "project" { - provider = fivetran-provider - group_id = "group_id" - type = "DBT_GIT" - run_tests = true - - project_config { - git_remote_url = "git_remote_url" - git_branch = "git_branch_1" - folder_path = "folder_path_1" - dbt_version = "string" - default_schema = "string" - threads = 1 - target_name = "string" - } - }`, - - Check: resource.ComposeAggregateTestCheckFunc( - func(s *terraform.State) error { - tfmock.AssertEqual(t, transformationProjectResourceMockPatchHandler.Interactions, 1) - tfmock.AssertEqual(t, 
transformationProjectResourceMockGetHandler.Interactions, 2) - tfmock.AssertNotEmpty(t, transformationProjectResourceMockData) - return nil - }, - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "id", "project_id"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "group_id", "group_id"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "transformation_version", "transformation_version_1"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_at", "created_at"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_by_id", "created_by_id"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "public_key", "public_key"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "default_schema", "default_schema"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "target_name", "target_name_1"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "environment_vars.0", "environment_var_1"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "threads", "2"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "type", "GIT"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_remote_url", "git_remote_url"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_branch", "git_branch_1"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.folder_path", "folder_path_1"), - ), - } - resource.Test( t, resource.TestCase{ @@ -240,193 +146,6 @@ func TestResourceTransformationProjectMappingMock(t *testing.T) { CheckDestroy: func(s *terraform.State) error { return nil }, - Steps: []resource.TestStep{ - step1, - step2, - }, - }, - ) -} - -func setupMockClientTransformationProjectResourceCreateTest(t *testing.T) { - - transformationProjectResponse := ` -{ - "id": "string", - "type": "DBT_GIT", - "status": "NOT_READY", - "errors": [ - "string" - ], - "created_at": "2019-08-24T14:15:22Z", - "group_id": "string", - "setup_tests": [ - { - "title": "Test Title", - "status": "FAILED", - "message": "Error message", - "details": "Error details" - } - ], - "created_by_id": "string", - "project_config": { - "dbt_version": "string", - "default_schema": "string", - "git_remote_url": "string", - "folder_path": "string", - "git_branch": "string", - "threads": 0, - "target_name": "string", - "environment_vars": [ - "string" - ], - "public_key": "string" - } - }` - - transformationProjectResponseReady := ` -{ - "id": "string", - "type": "DBT_GIT", - "status": "NOT_READY", - "errors": [ - "string" - ], - "created_at": "2019-08-24T14:15:22Z", - "group_id": "string", - "setup_tests": [ - { - "title": "Test Title", - "status": "FAILED", - "message": "Error message", - "details": "Error details" - } - ], - "created_by_id": "string", - "project_config": { - "dbt_version": "string", - "default_schema": "string", - "git_remote_url": "string", - "folder_path": "string", - "git_branch": "string", - "threads": 0, - "target_name": "string", - "environment_vars": [ - "string" - ], - "public_key": "string" - } - }` - tfmock.MockClient().Reset() - - getIteration := 0 - - transformationProjectResourceCreateMockGetHandler = tfmock.MockClient().When(http.MethodGet, "/v1/transformation-projects/project_id").ThenCall( - func(req 
*http.Request) (*http.Response, error) { - getIteration = getIteration + 1 - if getIteration == 1 { - return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", tfmock.CreateMapFromJsonString(t, transformationProjectResponse)), nil - } else { - return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", tfmock.CreateMapFromJsonString(t, transformationProjectResponseReady)), nil - } - - }, - ) - - transformationProjectResourceCreateMockPostHandler = tfmock.MockClient().When(http.MethodPost, "/v1/transformation-projects").ThenCall( - func(req *http.Request) (*http.Response, error) { - body := tfmock.RequestBodyToJson(t, req) - - tfmock.AssertKeyExistsAndHasValue(t, body, "group_id", "group_id") - tfmock.AssertKeyExistsAndHasValue(t, body, "transformation_version", "transformation_version") - tfmock.AssertKeyExistsAndHasValue(t, body, "default_schema", "default_schema") - tfmock.AssertKeyExistsAndHasValue(t, body, "target_name", "target_name") - varsFromRequest := body["environment_vars"].([]interface{}) - tfmock.AssertEqual(t, len(varsFromRequest), 1) - tfmock.AssertEqual(t, varsFromRequest[0], "environment_var") - - tfmock.AssertKeyExistsAndHasValue(t, body, "threads", float64(1)) - tfmock.AssertKeyExistsAndHasValue(t, body, "type", "GIT") - - tfmock.AssertKeyExists(t, body, "project_config") - - config := body["project_config"].(map[string]interface{}) - - tfmock.AssertKeyExistsAndHasValue(t, config, "git_remote_url", "git_remote_url") - tfmock.AssertKeyExistsAndHasValue(t, config, "git_branch", "git_branch") - tfmock.AssertKeyExistsAndHasValue(t, config, "folder_path", "folder_path") - - transformationProjectResourceCreateMockData = tfmock.CreateMapFromJsonString(t, transformationProjectResponse) - return tfmock.FivetranSuccessResponse(t, req, http.StatusCreated, "Success", transformationProjectResourceCreateMockData), nil - }, - ) - - transformationProjectResourceCreateMockDeleteHandler = tfmock.MockClient().When(http.MethodDelete, - "/v1/transformation-projects/project_id", - ).ThenCall( - func(req *http.Request) (*http.Response, error) { - transformationProjectResourceCreateMockData = nil - return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", nil), nil - }, - ) -} - -func TestResourceTransformationProjectCreateMock(t *testing.T) { - step1 := resource.TestStep{ - Config: ` - resource "fivetran_transformation_project" "project" { - provider = fivetran-provider - group_id = "group_id" - type = "DBT_GIT" - run_tests = true - - project_config { - git_remote_url = "git_remote_url" - git_branch = "git_branch_1" - folder_path = "folder_path_1" - dbt_version = "string" - default_schema = "string" - threads = 0 - target_name = "string" - } - }`, - - Check: resource.ComposeAggregateTestCheckFunc( - func(s *terraform.State) error { - tfmock.AssertEqual(t, transformationProjectResourceCreateMockPostHandler.Interactions, 1) - tfmock.AssertEqual(t, transformationProjectResourceCreateMockGetModelsHandler.Interactions, 0) - tfmock.AssertNotEmpty(t, transformationProjectResourceCreateMockData) - return nil - }, - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "id", "project_id"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "group_id", "group_id"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "transformation_version", "transformation_version"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_at", "created_at"), - 
resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_by_id", "created_by_id"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "public_key", "public_key"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "default_schema", "default_schema"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "target_name", "target_name"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "environment_vars.0", "environment_var"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "threads", "1"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "type", "GIT"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_remote_url", "git_remote_url"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_branch", "git_branch"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.folder_path", "folder_path"), - ), - } - - resource.Test( - t, - resource.TestCase{ - PreCheck: func() { - setupMockClientTransformationProjectResourceCreateTest(t) - }, - ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, - CheckDestroy: func(s *terraform.State) error { - tfmock.AssertEqual(t, transformationProjectResourceCreateMockDeleteHandler.Interactions, 1) - tfmock.AssertEmpty(t, transformationProjectResourceCreateMockData) - return nil - }, Steps: []resource.TestStep{ step1, }, From 55ee969f0069b21212921dd11afbcbba7a895302 Mon Sep 17 00:00:00 2001 From: Aleksandr Boldyrev Date: Wed, 22 Jan 2025 18:12:50 +0100 Subject: [PATCH 03/13] projects --- docs/data-sources/quickstart_package.md | 33 + docs/data-sources/quickstart_packages.md | 29 + docs/data-sources/transformation_projects.md | 15 + docs/data-sources/user.md | 35 - .../user_connector_memberships.md | 35 - docs/data-sources/user_group_memberships.md | 35 - docs/data-sources/users.md | 41 - docs/data-sources/webhook.md | 34 - docs/data-sources/webhooks.md | 40 - docs/guides/connector_setup.md | 142 - docs/guides/dbt_private_git_deploy_key.md | 62 - docs/guides/dbt_transformation.md | 92 - docs/guides/schema_json.md | 53 - docs/guides/schema_setup.md | 100 - docs/guides/version_0.7.2_update_guides.md | 108 - docs/guides/version_1.1.18_update_guides.md | 98 - docs/guides/version_1.3.0_update_guides.md | 116 - docs/guides/version_1.4.0_update_guides.md | 104 - docs/index.md | 51 - docs/resources/connector.md | 3961 ----------------- docs/resources/connector_certificates.md | 46 - docs/resources/connector_fingerprints.md | 79 - docs/resources/connector_schedule.md | 73 - docs/resources/connector_schema_config.md | 280 -- docs/resources/dbt_git_project_config.md | 64 - docs/resources/dbt_project.md | 104 - docs/resources/dbt_transformation.md | 93 - docs/resources/destination.md | 486 -- docs/resources/destination_certificates.md | 46 - docs/resources/destination_fingerprints.md | 79 - docs/resources/external_logging.md | 95 - docs/resources/group.md | 58 - docs/resources/group_users.md | 78 - docs/resources/hybrid_deployment_agent.md | 41 - docs/resources/local_processing_agent.md | 54 - docs/resources/private_link.md | 55 - docs/resources/proxy_agent.md | 35 - docs/resources/team.md | 61 - docs/resources/team_connector_membership.md | 82 - docs/resources/team_group_membership.md | 82 - docs/resources/team_user_membership.md | 76 - docs/resources/user.md | 67 - 
docs/resources/user_connector_membership.md | 78 - docs/resources/user_group_membership.md | 78 - docs/resources/webhook.md | 69 - .../core/model/quickstart_package.go | 35 + .../core/model/quickstart_packages.go | 62 + .../core/model/transformation_project.go | 68 +- .../core/model/transformation_projects.go | 40 + .../core/schema/quickstart_packages.go | 70 + .../core/schema/transformation_project.go | 59 +- .../datasources/quickstart_package.go | 58 + .../datasources/quickstart_package_test.go | 81 + .../datasources/quickstart_packages.go | 84 + .../datasources/quickstart_packages_test.go | 98 + .../datasources/transformation_project.go | 58 + .../transformation_project_test.go | 108 + .../datasources/transformation_projects.go | 83 + .../transformation_projects_test.go | 93 + fivetran/framework/provider.go | 4 + .../resources/transformation_project.go | 10 +- .../resources/transformation_project_test.go | 1 - .../data-sources/quickstart_package.md.tmpl | 17 + .../data-sources/quickstart_packages.md.tmpl | 17 + .../transformation_projects.md.tmpl | 16 + .../resources/transformation_project.md.tmpl | 59 + 66 files changed, 1174 insertions(+), 7490 deletions(-) create mode 100644 docs/data-sources/quickstart_package.md create mode 100644 docs/data-sources/quickstart_packages.md create mode 100644 docs/data-sources/transformation_projects.md delete mode 100644 docs/data-sources/user.md delete mode 100644 docs/data-sources/user_connector_memberships.md delete mode 100644 docs/data-sources/user_group_memberships.md delete mode 100644 docs/data-sources/users.md delete mode 100644 docs/data-sources/webhook.md delete mode 100644 docs/data-sources/webhooks.md delete mode 100644 docs/guides/connector_setup.md delete mode 100644 docs/guides/dbt_private_git_deploy_key.md delete mode 100644 docs/guides/dbt_transformation.md delete mode 100644 docs/guides/schema_json.md delete mode 100644 docs/guides/schema_setup.md delete mode 100644 docs/guides/version_0.7.2_update_guides.md delete mode 100644 docs/guides/version_1.1.18_update_guides.md delete mode 100644 docs/guides/version_1.3.0_update_guides.md delete mode 100644 docs/guides/version_1.4.0_update_guides.md delete mode 100644 docs/index.md delete mode 100644 docs/resources/connector.md delete mode 100644 docs/resources/connector_certificates.md delete mode 100644 docs/resources/connector_fingerprints.md delete mode 100644 docs/resources/connector_schedule.md delete mode 100644 docs/resources/connector_schema_config.md delete mode 100644 docs/resources/dbt_git_project_config.md delete mode 100644 docs/resources/dbt_project.md delete mode 100644 docs/resources/dbt_transformation.md delete mode 100644 docs/resources/destination.md delete mode 100644 docs/resources/destination_certificates.md delete mode 100644 docs/resources/destination_fingerprints.md delete mode 100644 docs/resources/external_logging.md delete mode 100644 docs/resources/group.md delete mode 100644 docs/resources/group_users.md delete mode 100644 docs/resources/hybrid_deployment_agent.md delete mode 100644 docs/resources/local_processing_agent.md delete mode 100644 docs/resources/private_link.md delete mode 100644 docs/resources/proxy_agent.md delete mode 100644 docs/resources/team.md delete mode 100644 docs/resources/team_connector_membership.md delete mode 100644 docs/resources/team_group_membership.md delete mode 100644 docs/resources/team_user_membership.md delete mode 100644 docs/resources/user.md delete mode 100644 docs/resources/user_connector_membership.md delete mode 100644 
docs/resources/user_group_membership.md
 delete mode 100644 docs/resources/webhook.md
 create mode 100644 fivetran/framework/core/model/quickstart_package.go
 create mode 100644 fivetran/framework/core/model/quickstart_packages.go
 create mode 100644 fivetran/framework/core/model/transformation_projects.go
 create mode 100644 fivetran/framework/core/schema/quickstart_packages.go
 create mode 100644 fivetran/framework/datasources/quickstart_package.go
 create mode 100644 fivetran/framework/datasources/quickstart_package_test.go
 create mode 100644 fivetran/framework/datasources/quickstart_packages.go
 create mode 100644 fivetran/framework/datasources/quickstart_packages_test.go
 create mode 100644 fivetran/framework/datasources/transformation_project.go
 create mode 100644 fivetran/framework/datasources/transformation_project_test.go
 create mode 100644 fivetran/framework/datasources/transformation_projects.go
 create mode 100644 fivetran/framework/datasources/transformation_projects_test.go
 create mode 100644 templates/data-sources/quickstart_package.md.tmpl
 create mode 100644 templates/data-sources/quickstart_packages.md.tmpl
 create mode 100644 templates/data-sources/transformation_projects.md.tmpl
 create mode 100644 templates/resources/transformation_project.md.tmpl

diff --git a/docs/data-sources/quickstart_package.md b/docs/data-sources/quickstart_package.md
new file mode 100644
index 00000000..51320e79
--- /dev/null
+++ b/docs/data-sources/quickstart_package.md
@@ -0,0 +1,29 @@
+---
+page_title: "Data Source: fivetran_quickstart_package"
+---
+
+# Data Source: fivetran_quickstart_package
+
+This data source returns the metadata details of the Quickstart transformation package if a valid identifier is provided.
+
+## Example Usage
+
+```hcl
+data "fivetran_quickstart_package" "test" {
+    id = "id"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `id` (String) The unique identifier for the Quickstart transformation package definition within the Fivetran system
+
+### Read-Only
+
+- `connector_types` (Set of String) The set of connector types
+- `name` (String) The Quickstart transformation package name
+- `output_model_names` (Set of String) The list of transformation output models
+- `version` (String) The Quickstart package definition version
\ No newline at end of file
diff --git a/docs/data-sources/quickstart_packages.md b/docs/data-sources/quickstart_packages.md
new file mode 100644
index 00000000..8cde78c9
--- /dev/null
+++ b/docs/data-sources/quickstart_packages.md
@@ -0,0 +1,40 @@
+---
+page_title: "Data Source: fivetran_quickstart_packages"
+---
+
+# Data Source: fivetran_quickstart_packages
+
+Returns a list of available Quickstart transformation package metadata details.
+
+## Example Usage
+
+```hcl
+data "fivetran_quickstart_packages" "test" {
+}
+```
+
+
+## Schema
+
+### Read-Only
+
+- `packages` (Block List) (see [below for nested schema](#nestedblock--packages))
+
+
+### Nested Schema for `packages`
+
+Read-Only:
+
+- `connector_types` (Set of String) The set of connector types
+- `id` (String) The unique identifier for the Quickstart transformation package definition within the Fivetran system
+- `name` (String) The Quickstart transformation package name
+- `output_model_names` (Set of String) The list of transformation output models
+- `version` (String) The Quickstart package definition version
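+
+For example, a minimal sketch of referencing the returned `packages` list in an output (the data source label `test` comes from the example above; the output name is illustrative only):
+
+```hcl
+# Collect the name of every available Quickstart package
+output "quickstart_package_names" {
+  value = [for pkg in data.fivetran_quickstart_packages.test.packages : pkg.name]
+}
+```
\ No newline at end of file
diff --git a/docs/data-sources/transformation_projects.md b/docs/data-sources/transformation_projects.md
new file mode 100644
index 00000000..8e12764d
---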
/dev/null +++ b/docs/data-sources/transformation_projects.md @@ -0,0 +1,15 @@ +--- +page_title: "Data Source: fivetran_transformation_projects" +--- + +# Data Source: fivetran_transformation_projects + +Returns a list of all transformation projects available via API within your Fivetran account. + +## Example Usage + +```hcl +data "fivetran_transformation_projects" "test" { +} +``` + diff --git a/docs/data-sources/user.md b/docs/data-sources/user.md deleted file mode 100644 index 48e604a7..00000000 --- a/docs/data-sources/user.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -page_title: "Data Source: fivetran_user" ---- - -# Data Source: fivetran_user - -This data source returns a user object. - -## Example Usage - -```hcl -data "fivetran_user" "my_user" { - id = "anonymous_mystery" -} -``` - - -## Schema - -### Required - -- `id` (String) The unique identifier for the user within the Fivetran system. - -### Read-Only - -- `created_at` (String) The timestamp that the user created their Fivetran account. -- `email` (String) The email address that the user has associated with their user profile. -- `family_name` (String) The last name of the user. -- `given_name` (String) The first name of the user. -- `invited` (Boolean) The field indicates whether the user has been invited to your account. -- `logged_in_at` (String) The last time that the user has logged into their Fivetran account. -- `phone` (String) The phone number of the user. -- `picture` (String) The user's avatar as a URL link (for example, 'http://mycompany.com/avatars/john_white.png') or base64 data URI (for example, 'data:image/png;base64,aHR0cDovL215Y29tcGFueS5jb20vYXZhdGFycy9qb2huX3doaXRlLnBuZw==') -- `role` (String) The role that you would like to assign to the user. -- `verified` (Boolean) The field indicates whether the user has verified their email address in the account creation process. \ No newline at end of file diff --git a/docs/data-sources/user_connector_memberships.md b/docs/data-sources/user_connector_memberships.md deleted file mode 100644 index 1c3f74c8..00000000 --- a/docs/data-sources/user_connector_memberships.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -page_title: "Data Source: fivetran_user_connector_memberships" ---- - -# Data Source: fivetran_user_connector_memberships - -This data source returns a connector membership for user. - -## Example Usage - -```hcl -data "fivetran_user_connector_memberships" "user_connector_membership" { - user_id = "user_id" -} -``` - - -## Schema - -### Required - -- `user_id` (String) The unique identifier for the user within your account. - -### Read-Only - -- `connector` (Block Set) (see [below for nested schema](#nestedblock--connector)) - - -### Nested Schema for `connector` - -Read-Only: - -- `connector_id` (String) The connector unique identifier -- `created_at` (String) The date and time the membership was created -- `role` (String) The user's role that links the user and the connector \ No newline at end of file diff --git a/docs/data-sources/user_group_memberships.md b/docs/data-sources/user_group_memberships.md deleted file mode 100644 index c1532c5c..00000000 --- a/docs/data-sources/user_group_memberships.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -page_title: "Data Source: fivetran_user_group_memberships" ---- - -# Data Source: fivetran_user_group_memberships - -This data source returns a list of group memberships for user. 
- -## Example Usage - -```hcl -data "fivetran_user_group_memberships" "team_group_memberships" { - user_id = "user_id" -} -``` - - -## Schema - -### Required - -- `user_id` (String) The unique identifier for the user within your account. - -### Read-Only - -- `group` (Block Set) (see [below for nested schema](#nestedblock--group)) - - -### Nested Schema for `group` - -Read-Only: - -- `created_at` (String) The date and time the membership was created -- `group_id` (String) The group unique identifier -- `role` (String) The user's role that links the user and the group \ No newline at end of file diff --git a/docs/data-sources/users.md b/docs/data-sources/users.md deleted file mode 100644 index 0ef8b4ea..00000000 --- a/docs/data-sources/users.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -page_title: "Data Source: fivetran_users" ---- - -# Data Source: fivetran_users - -This data source returns a list of all users within your Fivetran account. - -## Example Usage - -```hcl -data "fivetran_users" "users" { -} -``` - - -## Schema - -### Optional - -- `id` (String) The ID of this resource. - -### Read-Only - -- `users` (Block Set) (see [below for nested schema](#nestedblock--users)) - - -### Nested Schema for `users` - -Read-Only: - -- `created_at` (String) The timestamp that the user created their Fivetran account -- `email` (String) The email address that the user has associated with their user profile. -- `family_name` (String) The last name of the user. -- `given_name` (String) The first name of the user. -- `id` (String) The unique identifier for the user within your account. -- `invited` (Boolean) The field indicates whether the user has been invited to your account. -- `logged_in_at` (String) The last time that the user has logged into their Fivetran account. -- `phone` (String) The phone number of the user. -- `picture` (String) The user's avatar as a URL link (for example, 'http://mycompany.com/avatars/john_white.png') or base64 data URI (for example, 'data:image/png;base64,aHR0cDovL215Y29tcGFueS5jb20vYXZhdGFycy9qb2huX3doaXRlLnBuZw==') -- `verified` (Boolean) The field indicates whether the user has verified their email address in the account creation process. \ No newline at end of file diff --git a/docs/data-sources/webhook.md b/docs/data-sources/webhook.md deleted file mode 100644 index f3d3394c..00000000 --- a/docs/data-sources/webhook.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -page_title: "Data Source: fivetran_webhook" ---- - -# Data Source: fivetran_webhook - -This data source returns a webhook object. - -## Example Usage - -```hcl -data "fivetran_webhook" "webhook" { - id = "webhook_id" -} -``` - - -## Schema - -### Required - -- `id` (String) The webhook ID - -### Read-Only - -- `active` (Boolean) Boolean, if set to true, webhooks are immediately sent in response to events -- `created_at` (String) The webhook creation timestamp -- `created_by` (String) The ID of the user who created the webhook. -- `events` (Set of String) The array of event types -- `group_id` (String) The group ID -- `run_tests` (Boolean) Specifies whether the setup tests should be run -- `secret` (String) The secret string used for payload signing and masked in the response. 
-- `type` (String) The webhook type (group, account)
-- `url` (String) Your webhooks URL endpoint for your application
\ No newline at end of file
diff --git a/docs/data-sources/webhooks.md b/docs/data-sources/webhooks.md
deleted file mode 100644
index 3cede41f..00000000
--- a/docs/data-sources/webhooks.md
+++ /dev/null
@@ -1,40 +0,0 @@
----
-page_title: "Data Source: fivetran_webhooks"
----
-
-# Data Source: fivetran_webhooks
-
-This data source returns a list of all webhooks within your Fivetran account.
-
-## Example Usage
-
-```hcl
-data "fivetran_webhooks" "webhooks" {
-}
-```
-
-
-## Schema
-
-### Read-Only
-
-- `webhooks` (Attributes Set) (see [below for nested schema](#nestedatt--webhooks))
-
-
-### Nested Schema for `webhooks`
-
-Required:
-
-- `id` (String) The webhook ID
-
-Read-Only:
-
-- `active` (Boolean) Boolean, if set to true, webhooks are immediately sent in response to events
-- `created_at` (String) The webhook creation timestamp
-- `created_by` (String) The ID of the user who created the webhook.
-- `events` (Set of String) The array of event types
-- `group_id` (String) The group ID
-- `run_tests` (Boolean) Specifies whether the setup tests should be run
-- `secret` (String) The secret string used for payload signing and masked in the response.
-- `type` (String) The webhook type (group, account)
-- `url` (String) Your webhooks URL endpoint for your application
\ No newline at end of file
diff --git a/docs/guides/connector_setup.md b/docs/guides/connector_setup.md
deleted file mode 100644
index c7c074d3..00000000
--- a/docs/guides/connector_setup.md
+++ /dev/null
@@ -1,142 +0,0 @@
----
-page_title: "Initial Setup"
-subcategory: "Getting Started"
----
-
-# How to set up your Fivetran environment using Terraform
-
-In this guide, we will set up a simple pipeline with one source using the Fivetran Terraform Provider.
-
-## Provider setup
-
-First of all, you need to get your [Fivetran API Key and Secret](https://fivetran.com/docs/rest-api/getting-started#gettingstarted) and save them into environment variables:
-
-```bash
-export FIVETRAN_APIKEY=
-export FIVETRAN_APISECRET=
-```
-
-```hcl
-# Terraform 0.13+ uses the Terraform Registry:
-
-terraform {
-  required_providers {
-    fivetran = {
-      version = ">= 1.0.0"
-      source = "fivetran/fivetran"
-    }
-  }
-}
-
-# Configure the Fivetran provider
-provider "fivetran" {
-# We recommend using the environment variables `FIVETRAN_APIKEY` and `FIVETRAN_APISECRET` instead of explicit assignment
-# api_key = var.fivetran_api_key
-# api_secret = var.fivetran_api_secret
-}
-
-# Terraform 0.12- can be specified as:
-
-# Configure the Fivetran provider
-# provider "fivetran" {
-# api_key = "${var.fivetran_api_key}"
-# api_secret = "${var.fivetran_api_secret}"
-# }
-```
-
-## Add your group and destination
-
-The root resource for your Fivetran infrastructure setup is always the `Destination group`.
-First of all, you need to set up the group:
-
-```hcl
-resource "fivetran_group" "group" {
-  name = "MyGroup"
-}
-```
-
-Once you have created the group, you need to associate a `Destination` with it:
-
-```hcl
-resource "fivetran_destination" "destination" {
-  group_id = fivetran_group.group.id
-  service = "postgres_rds_warehouse"
-  time_zone_offset = "0"
-  region = "GCP_US_EAST4"
-  trust_certificates = "true"
-  trust_fingerprints = "true"
-  daylight_saving_time_enabled = "true"
-  run_setup_tests = "true"
-
-  config {
-    host = "destination.host"
-    port = 5432
-    user = "postgres"
-    password = "myPassword"
-    database = "myDatabaseName"
-    connection_type = "Directly"
-  }
-
-  # The setup tests operation could take time.
-  # You can define custom timeouts for create and update;
-  # the default value for the destination resource is 30 minutes.
-  timeouts {
-    create = "60m"
-    update = "60m"
-  }
-}
-```
-
-## Add your first connector
-
-We are now ready to set up our first connector:
-
-```hcl
-resource "fivetran_connector" "connector" {
-  group_id = fivetran_group.group.id
-  service = "fivetran_log"
-  run_setup_tests = true
-
-  destination_schema {
-    name = "my_fivetran_log_connector"
-  }
-
-  config {
-    is_account_level_connector = "false"
-  }
-
-  # The setup tests operation could take time.
-  # You can define custom timeouts for create and update;
-  # the default value for the connector resource is 30 minutes.
-  timeouts {
-    create = "60m"
-    update = "60m"
-  }
-
-  depends_on = [
-    fivetran_destination.destination
-  ]
-}
-```
-
-## Configure connector schedule
-
-Now we should configure how the connector will be scheduled to sync:
-
-```hcl
-resource "fivetran_connector_schedule" "connector_schedule" {
-  connector_id = fivetran_connector.connector.id
-  sync_frequency = 60
-  paused = false
-  pause_after_trial = false
-}
-```
-
-Now we are ready to apply our configuration:
-
-```bash
-terraform apply
-```
-
-## Example configuration
-
-An example .tf file with the configuration can be found [here](https://github.com/fivetran/terraform-provider-fivetran/tree/main/config-examples/connector_setup.tf).
\ No newline at end of file
diff --git a/docs/guides/dbt_private_git_deploy_key.md b/docs/guides/dbt_private_git_deploy_key.md
deleted file mode 100644
index f85e87e9..00000000
--- a/docs/guides/dbt_private_git_deploy_key.md
+++ /dev/null
@@ -1,62 +0,0 @@
----
-page_title: "Dbt Project Setup With Git Private Repo"
-subcategory: "Getting Started"
----
-
-# How to set up a dbt Project with a private Git repo.
-
-To be able to use a private dbt Project Git repository, you have to grant Fivetran access to this repo.
-To do that, you need to add a Deploy Key to your repository.
-To get the SSH key from Fivetran, create a `fivetran_dbt_project` resource:
-
-```hcl
-resource "fivetran_group" "my_group" {
-  name = "My_Group"
-}
-
-resource "fivetran_dbt_project" "project" {
-  group_id = fivetran_group.my_group.id
-  dbt_version = "1.3.2"
-  threads = 1
-  default_schema = "your_project_default_schema"
-  type = "GIT"
-}
-```
-
-Then you need to set up the dbt Project public key (the `public_key` field of the created resource) as a deploy key in your repo using:
-
-[GitHub Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/repository_deploy_key):
-```hcl
-resource "github_repository_deploy_key" "example_repository_deploy_key" {
-  title = "Repository test key"
-  repository = "repo-owner/repo-name"
-  key = fivetran_dbt_project.test_project.public_key
-  read_only = true
-}
-```
-
-or
-
-[Bitbucket Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/DrFaust92/bitbucket/latest/docs/resources/deploy_key):
-```hcl
-resource "bitbucket_deploy_key" "test" {
-  workspace = "repo-owner"
-  repository = "repo-name"
-  key = fivetran_dbt_project.test_project.public_key
-  label = "Repository test key"
-}
-```
-
-Since we recommend using third-party providers in this case, please make sure the providers are configured correctly for connection and have access to the repositories.
-
-After that, you can configure your project in the `fivetran_dbt_git_project_config` resource:
-
-```hcl
-resource "fivetran_dbt_git_project_config" "project_config" {
-  id = fivetran_dbt_project.project.id
-
-  git_remote_url = "git@github.com:repo-owner/repo-name.git"
-  git_branch = "main"
-}
-```
-
diff --git a/docs/guides/dbt_transformation.md b/docs/guides/dbt_transformation.md
deleted file mode 100644
index b95b1d99..00000000
--- a/docs/guides/dbt_transformation.md
+++ /dev/null
@@ -1,92 +0,0 @@
----
-page_title: "Dbt Project and Transformation Setup"
-subcategory: "Getting Started"
----
-
-# How to set up a dbt Project and Transformation schedule.
-
-In this guide, we will set up a simple pipeline with one dbt Transformation using the Fivetran Terraform Provider.
-
-## Prerequisites
-
-To create a project, you need to have a group and a destination.
-
-You can use existing ones or configure new ones using Terraform:
-
-```hcl
-resource "fivetran_group" "group" {
-  name = "MyGroup"
-}
-```
-
-Once you have created the group, you need to associate a `Destination` with it:
-
-```hcl
-resource "fivetran_destination" "destination" {
-  group_id = fivetran_group.group.id
-  service = "postgres_rds_warehouse"
-  time_zone_offset = "0"
-  region = "GCP_US_EAST4"
-  trust_certificates = "true"
-  trust_fingerprints = "true"
-  run_setup_tests = "true"
-
-  config {
-    host = "destination.host"
-    port = 5432
-    user = "postgres"
-    password = "myPassword"
-    database = "myDatabaseName"
-    connection_type = "Directly"
-  }
-}
-```
-
--> Note: your destination needs to have the `connected` status before dbt Project setup.
-
-## Add `fivetran_dbt_project` resource.
-
-Follow our [dbt Project setup guide](https://fivetran.com/docs/transformations/dbt/setup-guide#prerequisites) to complete the prerequisites for project creation.
-After that, let's configure the dbt Project resource:
-
-```hcl
-resource "fivetran_dbt_project" "project" {
-  group_id = fivetran_destination.destination.id
-  dbt_version = "1.3.2"
-  threads = 1
-  default_schema = "your_project_default_schema"
-  type = "GIT"
-  project_config {
-    git_remote_url = "git@github.com:your_project_git_remote.git"
-    git_branch = "main"
-  }
-}
-```
-
-Project creation and initialization takes time, so it's OK if resource creation takes 7-10 minutes.
-
-## Configure your dbt Transformation schedule
-
-You can configure your first Fivetran dbt Transformation with the `fivetran_dbt_transformation` resource:
-
-```hcl
-resource "fivetran_dbt_transformation" "test_transformation" {
-  dbt_project_id = fivetran_dbt_project.project.id
-  dbt_model_name = "your_dbt_model_name"
-  paused = false
-  run_tests = false
-  schedule {
-    schedule_type = "INTERVAL"
-    days_of_week = ["MONDAY"]
-    interval = 60
-  }
-}
-```
-
-The above configuration will schedule the model named `your_dbt_model_name` to run every 60 minutes on Mondays.
-
-Now we are ready to apply our configuration:
-
-```bash
-terraform apply
-```
\ No newline at end of file
diff --git a/docs/guides/schema_json.md b/docs/guides/schema_json.md
deleted file mode 100644
index 79b0d866..00000000
--- a/docs/guides/schema_json.md
+++ /dev/null
@@ -1,53 +0,0 @@
----
-page_title: "Using schemas_json field"
-subcategory: "Getting Started"
----
-
-# How to set up Fivetran connector schema config using Terraform in `.json` format.
-
-In cases when the schema configuration is really big and you have to define settings for more than 1000 tables, it's better to set the schema settings directly using a `.json` file:
-
-File `schema-config.json`:
-```json
-{
-    "schema_0": {
-        "enabled": true,
-        "some_random_extra_field": "extra_value",
-        "tables": {
-            "table_0": {
-                "some_random_extra_field": "extra_value",
-                "enabled": true
-            },
-            ...
-        }
-    },
-    "schema_2": {
-        "enabled": true,
-        "some_random_extra_field": "extra_value",
-        "tables": {
-            "table_0": {
-                "some_random_extra_field": "extra_value",
-                "enabled": true,
-                "columns": {
-                    "column_1": {
-                        "enabled": false
-                    }
-                }
-            },
-            ...
-        }
-    },
-    ...
-}
-```
-
-Configuration `.tf` file:
-```hcl
-resource "fivetran_connector_schema_config" "test_schema" {
-    provider = fivetran-provider
-    connector_id = "connector_id"
-    schema_change_handling = "ALLOW_COLUMNS"
-    schemas_json = file("path/to/schema-config.json")
-}
-```
--> NOTE: Please make sure that the `enabled` field inside the JSON is set to a boolean data type.
\ No newline at end of file
diff --git a/docs/guides/schema_setup.md b/docs/guides/schema_setup.md
deleted file mode 100644
index abfffdb1..00000000
--- a/docs/guides/schema_setup.md
+++ /dev/null
@@ -1,100 +0,0 @@
----
-page_title: "Connector Schema Setup"
-subcategory: "Getting Started"
----
-
-# How to set up Fivetran connector schema config using Terraform
-
-In this guide, we will set up a simple pipeline with one connector and schema using the Fivetran Terraform Provider.
-
-## Create a connector resource
-
-Create the `fivetran_connector` resource:
-
-```hcl
-resource "fivetran_connector" "connector" {
-  ...
-  run_setup_tests = "true" # it is necessary to authorize the connector
-}
-```
-
-The connector will be in the paused state, but ready to sync.
-
--> The connector should be **authorized** to be able to fetch the schema from the source. Set `run_setup_tests = "true"`.
-
-## Set up connector schema config
-
-Let's define what exactly we want to sync by using the `fivetran_connector_schema_config` resource:
-
-```hcl
-resource "fivetran_connector_schema_config" "connector_schema" {
-  connector_id = fivetran_connector.connector.id
-  schema_change_handling = "BLOCK_ALL"
-  schemas = {
-    "my_fivetran_log_connector" = {
-      enabled = true
-      tables = {
-        "log" = {
-          enabled = true
-          columns = {
-            "event" = {
-              enabled = true
-            }
-            "message_data" = {
-              enabled = true
-            }
-            "message_event" = {
-              enabled = true
-            }
-            "sync_id" = {
-              enabled = true
-            }
-          }
-        }
-      }
-    }
-  }
-  # Before applying, the schema resource will trigger the "Reload connector schema config" endpoint.
-  # It could take time for slow sources or for a source with a huge schema configuration,
-  # so to prevent timeouts you can set custom timeouts.
-  timeouts {
-    create = "6h"
-    read = "6h"
-    update = "6h"
-  }
-  # If you are not sure about the timing, you can set the timeouts to 0 - it means `no timeout`.
-  # WARNING: not recommended - this could lead to the apply process hanging unpredictably.
-  #timeouts {
-  #  create = "0"
-  #  read = "0"
-  #  update = "0"
-  #}
-}
-```
-
-## Set up connector schedule configuration
-
--> The schedule should depend on the schema resource to enable the connector **after** the schema changes are applied.
-
-```hcl
-resource "fivetran_connector_schedule" "my_connector_schedule" {
-  connector_id = fivetran_connector_schema_config.connector_schema.id
-
-  sync_frequency = "5"
-
-  paused = false
-  pause_after_trial = true
-
-  schedule_type = "auto"
-}
-```
-
-## Apply configuration
-
-```bash
-terraform apply
-```
-
-## Example configuration
-
-An example .tf file with the configuration can be found [here](https://github.com/fivetran/terraform-provider-fivetran/tree/main/config-examples/connector_schema_setup.tf).
\ No newline at end of file
diff --git a/docs/guides/version_0.7.2_update_guides.md b/docs/guides/version_0.7.2_update_guides.md
deleted file mode 100644
index 0ca86752..00000000
--- a/docs/guides/version_0.7.2_update_guides.md
+++ /dev/null
@@ -1,108 +0,0 @@
----
-page_title: "Version Update 0.7.2"
-subcategory: "Upgrade Guides"
----
-
-# Version 0.7.2
-
-## What's new in 0.7.2
-
-In version `0.7.2` of the Fivetran Terraform provider, the `fivetran_connector` resource is separated into two resources:
-- `fivetran_connector` resource
-- `fivetran_connector_schedule` resource
-With this new structure, it's now possible to create a connector, define the schema config for it, and enable it in one `apply` cycle without intermediate stages.
-Before this version, you had to "un-pause" the connector with an additional `apply` after applying the initial schema configuration to avoid syncing unneeded data.
-
-## Migration guide
-
-### Provider
-
-Update your provider configuration in the following way:
-
-Previous configuration:
-
-```hcl
-required_providers {
-   fivetran = {
-     version = "~> 0.7.1"
-     source = "fivetran/fivetran"
-   }
- }
-```
-
-Updated configuration:
-
-```hcl
-required_providers {
-   fivetran = {
-     version = ">= 0.7.2"
-     source = "fivetran/fivetran"
-   }
- }
-```
-
-### Resource `fivetran_connector`
-
-Update all your connector resources (`fivetran_connector`):
-
-Previous configuration:
-
-```hcl
-resource "fivetran_connector" "test_connector" {
-
-  group_id = "worker_tennis"
-  service = "fivetran_log"
-
-  destination_schema {
-    name = "fivetran_log_schema"
-  }
-
-  sync_frequency = "1440"
-  daily_sync_time = "6:00"
-  paused = false
-  pause_after_trial = false
-
-  run_setup_tests = true
-  config {
-    group_name = "worker_tennis"
-  }
-}
-```
-
-Updated configuration:
-
-```hcl
-resource "fivetran_connector" "test_connector" {
-  group_id = "worker_tennis"
-  service = "fivetran_log"
-
-  destination_schema {
-    name = "fivetran_log_schema"
-  }
-
-  run_setup_tests = true
-
-  config {
-    group_name = "worker_tennis"
-  }
-}
-resource "fivetran_connector_schedule" "test_connector_schedule" {
-  connector_id = fivetran_connector.test_connector.id
-
-  sync_frequency = "1440"
-  daily_sync_time = "6:00"
-  paused = false
-  pause_after_trial = false
-
-  schedule_type = "auto"
-}
-
-```
-
-### Update terraform state
-
-Once all configurations have been updated, run:
-
-```
-terraform init -upgrade
-```
\ No newline at end of file
diff --git a/docs/guides/version_1.1.18_update_guides.md b/docs/guides/version_1.1.18_update_guides.md
deleted file mode 100644
index 2a97c8ef..00000000
--- a/docs/guides/version_1.1.18_update_guides.md
+++ /dev/null
@@ -1,98 +0,0 @@
----
-page_title: "Version Update 1.1.18"
-subcategory: "Upgrade Guides"
----
-
-# Version 1.1.18
-
-## What's new in 1.1.18
-
-In version `1.1.18` of the Fivetran Terraform provider, the `fivetran_connector_schema_config` resource behavior changed:
-- If no column settings are specified in `table.columns`, no settings will be applied. If a table is enabled, its columns won't be blocked automatically by the `BLOCK_ALL` policy.
-- Settings for sub-elements won't be managed if the root element is disabled: under the `BLOCK_ALL` policy, no settings will be applied for the tables/columns of a disabled schema.
-
-## Migration guide
-
-### Provider
-
-Update your provider configuration in the following way:
-
-Previous configuration:
-
-```hcl
-required_providers {
-   fivetran = {
-     version = "~> 1.1.17"
-     source = "fivetran/fivetran"
-   }
- }
-```
-
-Updated configuration:
-
-```hcl
-required_providers {
-   fivetran = {
-     version = ">= 1.1.18"
-     source = "fivetran/fivetran"
-   }
- }
-```
-
-### Resource `fivetran_connector_schema_config`
-
-Update all your connector schema config resources (`fivetran_connector_schema_config`):
-
-Previous configuration:
-
-```hcl
-resource "fivetran_connector_schema_config" "test_schema" {
-  connector_id = "connector_id"
-  schema_change_handling = "ALLOW_ALL"
-
-  schema {
-    name = "schema_name"
-    table {
-      name = "table_name"
-      sync_mode = "HISTORY"
-      column {
-        name = "hashed_column_name"
-        hashed = "true"
-      }
-    }
-  }
-}
-```
-
-Updated configuration:
-
-```hcl
-resource "fivetran_connector_schema_config" "test_schema" {
-  connector_id = "connector_id"
-  schema_change_handling = "ALLOW_ALL"
-
-  schemas = {
-    "schema_name" = {
-      tables = {
-        "table_name" = {
-          sync_mode = "HISTORY"
-          columns = {
-            "hashed_column_name" = {
-              hashed = true
-            }
-          }
-        }
-      }
-    }
-  }
-}
-
-```
-
-### Update terraform state
-
-Once all configurations have been updated, run:
-
-```
-terraform init -upgrade
-```
\ No newline at end of file
diff --git a/docs/guides/version_1.3.0_update_guides.md b/docs/guides/version_1.3.0_update_guides.md
deleted file mode 100644
index fdca57a1..00000000
--- a/docs/guides/version_1.3.0_update_guides.md
+++ /dev/null
@@ -1,116 +0,0 @@
----
-page_title: "Version Update 1.3.0"
-subcategory: "Upgrade Guides"
----
-
-# Version 1.3.0
-
-## What's new in 1.3.0
-
-In version `1.3.0` of the Fivetran Terraform provider, the `fivetran_dbt_project` resource behavior changed:
-- installation of the dbt project configuration should now occur in a separate resource, `fivetran_dbt_git_project_config`, after the deploy key is installed in the repository
-
-## Migration guide
-
-### Provider
-
-Update your provider configuration in the following way:
-
-Previous configuration:
-
-```hcl
-required_providers {
-   fivetran = {
-     version = "~> 1.2.8"
-     source = "fivetran/fivetran"
-   }
- }
-```
-
-Updated configuration:
-
-```hcl
-required_providers {
-   fivetran = {
-     version = ">= 1.3.0"
-     source = "fivetran/fivetran"
-   }
- }
-```
-
-### Resource `fivetran_dbt_project`
-
-Update all your dbt project resources (`fivetran_dbt_project`):
-
-Previous configuration:
-
-```hcl
-resource "fivetran_dbt_project" "test_project" {
-  provider = fivetran-provider
-  group_id = fivetran_destination.test_destination.id
-  dbt_version = "1.0.1"
-  threads = 1
-  default_schema = "dbt_demo_test_e2e_terraform"
-  type = "GIT"
-  project_config {
-    folder_path = "/folder/path"
-    git_remote_url = "git@github.com:fivetran/repo-name.git"
-    git_branch = "main"
-  }
-}
-```
-
-Updated configuration:
-
-```hcl
-resource "fivetran_dbt_project" "test_project" {
-  provider = fivetran-provider
-  group_id = fivetran_destination.test_destination.id
-  dbt_version = "1.0.1"
-  threads = 1
-  default_schema = "dbt_demo_test_e2e_terraform"
-  type = "GIT"
-}
-```
-
-For GitHub-based repositories:
-```hcl
-resource "github_repository_deploy_key" "example_repository_deploy_key" {
-  title = "Repository test key"
-  repository = "fivetran/repo-name"
-  key = fivetran_dbt_project.test_project.public_key
-  read_only = true
-}
-```
-
-For Bitbucket-based repositories:
-```hcl
-resource "bitbucket_deploy_key" "test" {
-  workspace = "fivetran"
"fivetran" - repository = "repo-name" - key = fivetran_dbt_project.test_project.public_key - label = "Repository test key" -} -``` - -Since we recommend using third-party providers in this case, please make sure that access to the repositories is provided correctly and the providers are configured correctly for connection. - - -```hcl -resource "fivetran_dbt_git_project_config" "test_project_config" { - project_id = fivetran_dbt_project.test_project.id - - folder_path = "/folder/path" - git_remote_url = "git@github.com:fivetran/repo-name.git" - git_branch = "main" -} - -``` - -### Update terraform state - -Once all configurations have been updated, run: - -``` -terraform init -upgrade -``` \ No newline at end of file diff --git a/docs/guides/version_1.4.0_update_guides.md b/docs/guides/version_1.4.0_update_guides.md deleted file mode 100644 index 5aa83333..00000000 --- a/docs/guides/version_1.4.0_update_guides.md +++ /dev/null @@ -1,104 +0,0 @@ ----- -page_title: "Version Update 1.4.0" -subcategory: "Upgrade Guides" ---- - -# Version 1.4.0 - -## What's new in 1.4.0 - -In version `1.4.0` of Fivetran Terraform provider, resource `fivetran_local_processing_agent` renamed to `fivetran_hybrid_deployment_agent` - -## Migration guide - -### Provider - -Update your provider configuration in the following way: - -Previous configuration: - -```hcl -required_providers { - fivetran = { - version = "~> 1.3.0" - source = "fivetran/fivetran" - } - } -``` - -Updated configuration: - -```hcl -required_providers { - fivetran = { - version = ">= 1.4.0" - source = "fivetran/fivetran" - } - } -``` - -### Resource `fivetran_hybrid_deployment_agent` - -Update all your local processing agent resources (`fivetran_local_processing_agent`): - -Previous configuration: - -```hcl -resource "fivetran_local_processing_agent" "test_agent" { -} -``` - -Updated configuration: - -```hcl -resource "fivetran_hybrid_deployment_agent" "test_agent" { -} -``` - -### Resource `fivetran_connector` - -Update all your connector resources (`fivetran_connector`): - -Previous configuration: - -```hcl -resource "fivetran_connector" "test_connector" { - local_processing_agent_id = agent_id -} -``` - -Updated configuration: - -```hcl -resource "fivetran_connector" "test_connector" { - hybrid_deployment_agent_id = agent_id -} -``` - -### Resource `fivetran_destination` - -Update all your destination resources (`fivetran_destination`): - -Previous configuration: - -```hcl -resource "fivetran_destination" "test_destination" { - local_processing_agent_id = agent_id -} -``` - -Updated configuration: - -```hcl -resource "fivetran_destination" "test_destination" { - hybrid_deployment_agent_id = agent_id -} -``` - -### Update terraform state - -Once all configurations have been updated, run: - -``` -terraform init -upgrade -``` \ No newline at end of file diff --git a/docs/index.md b/docs/index.md deleted file mode 100644 index 54712d24..00000000 --- a/docs/index.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "Fivetran Provider" -subcategory: "" -description: |- - ---- - -# fivetran Provider - -## Example Usage - -```terraform -# Terraform 0.13+ uses the Terraform Registry: - -terraform { - required_providers { - fivetran = { - version = ">= 1.0.0" - source = "fivetran/fivetran" - } - } -} - -# Configure the Fivetran provider -provider "fivetran" { -# We recommend to use environment variables instead of explicit assignment -# api_key = var.fivetran_api_key -# api_secret = 
var.fivetran_api_secret -} - - -# Terraform 0.12- can be specified as: - -# Configure the Fivetran provider -# provider "fivetran" { -# api_key = "${var.fivetran_api_key}" -# api_secret = "${var.fivetran_api_secret}" -# } -``` - -## Schema - -### Required - -- `api_key` (String) -- `api_secret` (String, Sensitive) - -### Optional - -- `api_url` (String) \ No newline at end of file diff --git a/docs/resources/connector.md b/docs/resources/connector.md deleted file mode 100644 index 04f939a2..00000000 --- a/docs/resources/connector.md +++ /dev/null @@ -1,3961 +0,0 @@ ---- -page_title: "Resource: fivetran_connector" ---- - -# Resource: fivetran_connector - -This resource allows you to create, update, and delete connectors. - -## Example Usage - -```hcl -resource "fivetran_connector" "amplitude" { - group_id = fivetran_group.group.id - service = "amplitude" - - destination_schema { - name = "amplitude_connector" - } - - config { - project_credentials { - project = "project1" - api_key = "my_api_key" - secret_key = "my_secret_key" - } - - project_credentials { - project = "project2" - api_key = "my_api_key" - secret_key = "my_secret_key" - } - } -} -``` - --> Use `destination_schema` to define connector schema configuration. Field `destination_schema.name` will be mapped into `config.schema` in REST API payload. Field `destination_schema.table` will be mapped into `config.table` in REST API payload. Field `destination_schema.prefix` will be mapped into `config.schema_prefix` in REST API payload. Specify values according to [public documentation](https://fivetran.com/docs/rest-api/connectors/config) for particular connector type. - -### NOTE: resources indirect dependencies - -The connector resource receives the `group_id` parameter value from the group resource, but the destination resource depends on the group resource. When you try to destroy the destination resource infrastructure, the terraform plan is created successfully, but once you run the `terraform apply` command, it returns an error because the Fivetran API doesn't let you delete destinations that have linked connectors. To solve this problem, you should either explicitly define `depends_on` between the connector and destination: - -```hcl -resource "fivetran_connector" "amplitude" { - ... - depends_on = [ - fivetran_destination.my_destination - ] -} -``` - -or get the group ID from the destination: - -```hcl -resource "fivetran_connector" "amplitude" { - group_id = fivetran_destination.my_destination.group_id - ... -} -``` - - -## Schema - -### Required - -- `group_id` (String) The unique identifier for the Group (Destination) within the Fivetran system. -- `service` (String) The connector type id within the Fivetran system. - -### Optional - -- `auth` (Block, Optional) (see [below for nested schema](#nestedblock--auth)) -- `config` (Block, Optional) (see [below for nested schema](#nestedblock--config)) -- `data_delay_sensitivity` (String) The level of data delay notification threshold. Possible values: LOW, NORMAL, HIGH, CUSTOM. The default value NORMAL. CUSTOM is only available for customers using the Enterprise plan or above. -- `data_delay_threshold` (Number) Custom sync delay notification threshold in minutes. The default value is 0. This parameter is only used when data_delay_sensitivity set to CUSTOM. 
-- `destination_schema` (Block, Optional) (see [below for nested schema](#nestedblock--destination_schema)) -- `hybrid_deployment_agent_id` (String) The hybrid deployment agent ID that refers to the controller created for the group the connection belongs to. If the value is specified, the system will try to associate the connection with an existing agent. -- `local_processing_agent_id` (String, Deprecated) (Deprecated) The hybrid deployment agent ID that refers to the controller created for the group the connection belongs to. If the value is specified, the system will try to associate the connection with an existing agent. -- `networking_method` (String) Possible values: Directly, SshTunnel, ProxyAgent. -- `private_link_id` (String) The private link ID. -- `proxy_agent_id` (String) The proxy agent ID. -- `run_setup_tests` (Boolean) Specifies whether the setup tests should be run automatically. The default value is FALSE. -- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts)) -- `trust_certificates` (Boolean) Specifies whether we should trust the certificate automatically. The default value is FALSE. If a certificate is not trusted automatically, it has to be approved with [Certificates Management API Approve a destination certificate](https://fivetran.com/docs/rest-api/certificates#approveadestinationcertificate). -- `trust_fingerprints` (Boolean) Specifies whether we should trust the SSH fingerprint automatically. The default value is FALSE. If a fingerprint is not trusted automatically, it has to be approved with [Certificates Management API Approve a destination fingerprint](https://fivetran.com/docs/rest-api/certificates#approveadestinationfingerprint). - -### Read-Only - -- `connected_by` (String) The unique identifier of the user who has created the connector in your account. -- `created_at` (String) The timestamp of the time the connector was created in your account. -- `id` (String) The unique identifier for the connector within the Fivetran system. -- `name` (String) The name used both as the connector's name within the Fivetran system and as the source schema's name within your destination. - - -### Nested Schema for `auth` - -Optional: - -- `access_token` (String, Sensitive) Field usage depends on `service` value: - - Service `autodesk_bim_360`: Your Autodesk BIM 360 Access Token. - - Service `azure_sql_db`: The long-lived Access token carries the information necessary to access API resources. - - Service `azure_sql_managed_db`: The long-lived Access token carries the information necessary to access API resources. - - Service `billing_platform`: Your BillingPlatform access token. - - Service `calendly`: Your Calendly access token. - - Service `docebo`: Your Docebo Access Token. - - Service `drift`: Your Drift access token. - - Service `employment_hero`: Your Employment Hero access token. - - Service `facebook_ads`: The long-lived `Access token` along with the `client_id` and `client_secret` parameters carry the information necessary to query the Facebook Ads API - - Service `facebook_pages`: The `Access Token` carries the information necessary for API resources to fetch data - - Service `freshbooks`: Your FreshBooks Access Token. - - Service `gitlab`: Your GitLab access token. - - Service `google_business_profile`: Your Google Business Profile Access token. - - Service `google_calendar`: Your Google Calendar access token. - - Service `google_classroom`: The `Access Token` that carries the information necessary for API resources to fetch data. 
- - Service `google_tasks`: The access token that carries the information necessary for API resources to your Google Tasks fetch data. - - Service `instagram_business`: The `Access Token` carries the information necessary for API resources to fetch data - - Service `intercom`: The long-lived `Access Token` carries the information necessary for API resources to fetch data. - - Service `medallia`: Your Medallia access token that contains all the information necessary for the API resources to fetch your data. - - Service `pinterest_organic`: Your Pinterest access token. - - Service `ramp`: Your Ramp access token. - - Service `ringcentral`: The long-lived `Access token` carries the information necessary to access API resources. - - Service `shopify`: The Shopify access token. - - Service `slack`: Your Slack access token. - - Service `stripe`: The Stripe API Restricted Key - - Service `stripe_test`: The Stripe API Restricted Key - - Service `survey_monkey`: The long-lived `Access token` carries the information necessary to access API resources. - - Service `tiktok_ads`: The long-lived `Access token` carries the information necessary to access API resources. - - Service `typeform`: The Typeform API access token. - - Service `yahoo_search_ads_yahoo_japan`: Your Yahoo Search Ads Access Token. - - Service `zendesk`: The long-lived `Access token` carries the information necessary to access API resources. - - Service `zendesk_chat`: The long-lived `Access token` carries the information necessary to access API resources. - - Service `zendesk_sell`: The long-lived `Access token` carries the information necessary to access API resources. - - Service `zoom`: Your Zoom Access token. -- `api_key` (String) Field usage depends on `service` value: - - Service `elastic_cloud`: The Elasticsearch API key. If omitted, then basic user and password authentication will apply. - - Service `es_self_hosted`: The Elasticsearch API key. If omitted, then basic user and password authentication will apply. -- `aws_access_key` (String) Field usage depends on `service` value: - - Service `amazon_selling_partner`: `AWS Access Key` of your AWS Account User. -- `aws_secret_key` (String) Field usage depends on `service` value: - - Service `amazon_selling_partner`: `AWS Secret Key` of your AWS Account User. -- `client_access` (Block, Optional) (see [below for nested schema](#nestedblock--auth--client_access)) -- `client_id` (String) Field usage depends on `service` value: - - Service `amazon_selling_partner`: `Client ID` of your Amazon Seller/Vendor Central client application. - - Service `apple_search_ads`: Apple Search Ads REST API Client ID. Must be populated if `is_auth2_enabled` is set to `true`. - - Service `workday`: Client ID - - Service `workday_financial_management`: ID of your Workday Client App - - Service `workday_hcm`: ID of your Workday Client App - - Service `yahoo_dsp`: Your Yahoo DSP Client ID. -- `client_secret` (String) Field usage depends on `service` value: - - Service `amazon_selling_partner`: `Client Secret` of your Amazon Seller/Vendor Central client application. - - Service `workday`: Client Secret - - Service `workday_financial_management`: Secret of your Workday Client App - - Service `workday_hcm`: Secret of your Workday Client App - - Service `yahoo_dsp`: Your Yahoo DSP Client Secret. -- `consumer_key` (String) Field usage depends on `service` value: - - Service `twitter`: API Key of your app - - Service `twitter_ads`: The Twitter App consumer key. 
-- `consumer_secret` (String) Field usage depends on `service` value: - - Service `twitter`: API Secret of your app - - Service `twitter_ads`: The Twitter App consumer secret. -- `key_id` (String) Field usage depends on `service` value: - - Service `apple_search_ads`: Apple Search Ads REST API Key ID. Must be populated if `is_auth2_enabled` is set to `true`. -- `oauth_token` (String) Field usage depends on `service` value: - - Service `twitter`: The Twitter App access token. - - Service `twitter_ads`: The Twitter App access token. -- `oauth_token_secret` (String) Field usage depends on `service` value: - - Service `twitter`: The Twitter App access token secret. - - Service `twitter_ads`: The Twitter App access token secret. -- `ocapi_access_token` (String) -- `ocapi_refresh_token` (String) -- `previous_refresh_token` (String, Sensitive) Field usage depends on `service` value: - - Service `dynamics_365`: Previous `Refresh token` of your application. -- `realm_id` (String) Field usage depends on `service` value: - - Service `quickbooks`: `Realm ID` of your QuickBooks application. -- `refresh_token` (String, Sensitive) Field usage depends on `service` value: - - Service `adroll`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `airtable`: The long-lived refresh token along with the client ID and client secret carry the information necessary to get a new access token for API resources. - - Service `amazon_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `amazon_selling_partner`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `asana`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `autodesk_bim_360`: Your Autodesk BIM 360 Refresh Token. - - Service `azure_service_bus`: The refresh token. Required if the authentication type is `AzureActiveDirectory` - - Service `azure_sql_db`: The long-lived Refresh token carries the information necessary to get a new access token for API resources. - - Service `azure_sql_managed_db`: The long-lived Refresh token carries the information necessary to get a new access token for API resources. - - Service `billing_platform`: Your BillingPlatform refresh token. - - Service `bingads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `business_central`: The long-lived refresh token along with the client ID and client secret parameters carry the information necessary to get a new access token for API resources. - - Service `calendly`: Your Calendly refresh token. - - Service `docebo`: Your Docebo Refresh Token. - - Service `double_click_campaign_manager`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `double_click_publishers`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. 
- - Service `drift`: Your Drift refresh token. - - Service `dropbox`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `dynamics_365`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `employment_hero`: Your Employment Hero refresh token. - - Service `financial_force`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `freshbooks`: Your FreshBooks Refresh Token. - - Service `front`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `gitlab`: Your GitLab refresh token. - - Service `google_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `google_analytics`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `google_analytics_4`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `google_analytics_mcf`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `google_business_profile`: Your Google Business Profile Refresh token. - - Service `google_calendar`: Your Google Calendar refresh token. - - Service `google_classroom`: The long-lived `Refresh token` of your Google Calendar client application. - - Service `google_display_and_video_360`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `google_play`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `google_search_ads_360`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `google_sheets`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `google_tasks`: The long-lived refresh token of your Google Tasks client application. - - Service `helpscout`: The long-lived `refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `hubspot`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `linkedin_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. 
- - Service `linkedin_company_pages`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `microsoft_lists`: The long-lived Refresh token carries the information necessary to get a new access token for API resources. - - Service `one_drive`: The long-lived `Refresh token` carries the information necessary to get a new access token for API resources. - - Service `optimizely`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `outreach`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `pinterest_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `pinterest_organic`: Your Pinterest refresh token. - - Service `pipedrive`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `qualtrics`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `quickbooks`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `ramp`: Your Ramp refresh token. - - Service `reddit_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `salesforce`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `salesforce_sandbox`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `salesloft`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `share_point`: The long-lived Refresh token carries the information necessary to get a new access token for API resources. - - Service `slack`: Your Slack refresh token. - - Service `snapchat_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `spotify_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. - - Service `typeform`: The Typeform API refresh token. - - Service `workday`: OAuth Refresh Token - - Service `yahoo_search_ads_yahoo_japan`: Your Yahoo Search Ads Refresh Token. - - Service `zoho_crm`: The long-lived `Refresh token`, along with the `client_id` and `client_secret` parameters, carries the information necessary to get a new access token for API resources. - - Service `zoom`: Your Zoom Refresh token. 
-- `role_arn` (String) Field usage depends on `service` value: - - Service `amazon_selling_partner`: `IAM Role ARN` of your AWS Account. -- `team_id` (String) Field usage depends on `service` value: - - Service `apple_search_ads`: Apple Search Ads REST API Team ID. Must be populated if `is_auth2_enabled` is set to `true`. -- `user_access_token` (String) Field usage depends on `service` value: - - Service `facebook_ads`: Access Token - - -### Nested Schema for `auth.client_access` - -Optional: - -- `client_id` (String) Field usage depends on `service` value: - - Service `adroll`: `Client ID` of your AdRoll client application. - - Service `airtable`: `Client ID` of your Airtable client application. - - Service `amazon_ads`: `Client ID` of your Amazon Ads client application. - - Service `asana`: `Client ID` of your Asana client application. - - Service `azure_service_bus`: `Client ID` of your Azure application. Required if the authentication type is `AzureActiveDirectory` - - Service `bingads`: `Client ID` of your Microsoft Advertising client application. - - Service `business_central`: `Client ID` of your Airtable client application. - - Service `double_click_campaign_manager`: `Client ID` of your Google Campaign Manager 360 client application. - - Service `double_click_publishers`: `Client ID` of your Google Ad Manager client application. - - Service `dropbox`: `Client ID` of your Dropbox client application. - - Service `dynamics_365`: `Client ID` of your Dynamic 365 client application, or Service Principal. - - Service `facebook_ads`: `Client ID` of your Facebook client application. - - Service `facebook_pages`: `Client ID` of your Facebook client application. - - Service `financial_force`: `Client ID` of your Salesforce client application. - - Service `front`: `Client ID` of your Front client application. - - Service `google_ads`: `Client ID` of your Google Ads client application. - - Service `google_analytics`: `Client ID` of your Google Analytics client application. - - Service `google_analytics_4`: `Client ID` of your Google Analytics client application. - - Service `google_analytics_mcf`: `Client ID` of your Google Analytics client application. - - Service `google_display_and_video_360`: `Client ID` of your Google Display & Video 360 client application. - - Service `google_play`: `Client ID` of your Google Play client application. - - Service `google_search_ads_360`: `Client ID` of your Google Search Ads 360 client application. - - Service `google_search_console`: `Client ID` of your Google Search Console client application. - - Service `google_sheets`: `Client ID` of your Google Sheets client application. - - Service `helpscout`: `Client ID` of your Help Scout client application. - - Service `hubspot`: `Client ID` of your HubSpot client application. - - Service `instagram_business`: `Client ID` of your Facebook client application. - - Service `linkedin_ads`: `Client ID` of your LinkedIn client application. - - Service `linkedin_company_pages`: `Client ID` of your LinkedIn client application. - - Service `microsoft_lists`: `Client ID` of your Microsoft client application. - - Service `one_drive`: `Client ID` of your Microsoft OneDrive client application. - - Service `optimizely`: `Client ID` of your Optimizely client application. - - Service `outreach`: `Client ID` of your Outreach client application. - - Service `pardot`: `Client ID` of your Pardot client application. - - Service `pinterest_ads`: `Client ID` of your Pinterest client application. 
- - Service `pipedrive`: `Client ID` of your Pipedrive client application.
- - Service `qualtrics`: `Client ID` of your Qualtrics client application.
- - Service `quickbooks`: `Client ID` of your QuickBooks client application.
- - Service `reddit_ads`: `Client ID` of your Reddit Ads client application.
- - Service `salesforce`: `Client ID` of your Salesforce client application.
- - Service `salesforce_sandbox`: `Client ID` of your Salesforce client application.
- - Service `share_point`: `Client ID` of your Microsoft client application.
- - Service `snapchat_ads`: `Client ID` of your Snapchat Ads client application.
- - Service `spotify_ads`: `Client ID` of your Ad Studio application.
- - Service `survey_monkey`: `Client ID` of your SurveyMonkey client application.
- - Service `tiktok_ads`: `Client ID` of your TikTok Ads client application.
- - Service `twitter`: `Client ID` of your Twitter client application.
- - Service `twitter_ads`: `Client ID` of your Twitter Ads client application.
- - Service `typeform`: The Typeform client ID.
- - Service `yahoo_gemini`: `Client ID` of your Yahoo Gemini client application.
- - Service `youtube_analytics`: `Client ID` of your YouTube client application.
- - Service `zoho_crm`: `Client ID` of your Zoho client application.
-- `client_secret` (String, Sensitive) Field usage depends on `service` value:
- - Service `adroll`: `Client Secret` of your AdRoll client application.
- - Service `airtable`: `Client Secret` of your Airtable client application.
- - Service `amazon_ads`: `Client Secret` of your Amazon Ads client application.
- - Service `asana`: `Client Secret` of your Asana client application.
- - Service `azure_service_bus`: `Client Secret` of your Azure application. Required if the authentication type is `AzureActiveDirectory`
- - Service `bingads`: `Client Secret` of your Microsoft Advertising client application.
- - Service `business_central`: `Client Secret` of your Business Central client application.
- - Service `double_click_campaign_manager`: `Client Secret` of your Google Campaign Manager 360 client application.
- - Service `double_click_publishers`: `Client Secret` of your Google Ad Manager client application.
- - Service `dropbox`: `Client Secret` of your Dropbox client application.
- - Service `dynamics_365`: `Client Secret` of your Dynamics 365 client application, or Service Principal.
- - Service `facebook_ads`: `Client Secret` of your Facebook client application.
- - Service `facebook_pages`: `Client Secret` of your Facebook client application.
- - Service `financial_force`: `Client Secret` of your Salesforce client application.
- - Service `front`: `Client Secret` of your Front client application.
- - Service `google_ads`: `Client Secret` of your Google Ads client application.
- - Service `google_analytics`: `Client Secret` of your Google Analytics client application.
- - Service `google_analytics_4`: `Client Secret` of your Google Analytics client application.
- - Service `google_analytics_mcf`: `Client Secret` of your Google Analytics client application.
- - Service `google_display_and_video_360`: `Client Secret` of your Google Display & Video 360 client application.
- - Service `google_play`: `Client Secret` of your Google Play client application.
- - Service `google_search_ads_360`: `Client Secret` of your Google Search Ads 360 client application.
- - Service `google_search_console`: `Client Secret` of your Google Search Console client application.
- - Service `google_sheets`: `Client Secret` of your Google Sheets client application.
- - Service `helpscout`: `Client Secret` of your Help Scout client application.
- - Service `hubspot`: `Client Secret` of your HubSpot client application.
- - Service `instagram_business`: `Client Secret` of your Facebook client application.
- - Service `linkedin_ads`: `Client Secret` of your LinkedIn client application.
- - Service `linkedin_company_pages`: `Client Secret` of your LinkedIn client application.
- - Service `microsoft_lists`: `Client Secret` of your Microsoft client application.
- - Service `one_drive`: `Client Secret` of your Microsoft OneDrive client application.
- - Service `optimizely`: `Client Secret` of your Optimizely client application.
- - Service `outreach`: `Client Secret` of your Outreach client application.
- - Service `pardot`: `Client Secret` of your Pardot client application.
- - Service `pinterest_ads`: `Client Secret` of your Pinterest client application.
- - Service `pipedrive`: `Client Secret` of your Pipedrive client application.
- - Service `qualtrics`: `Client Secret` of your Qualtrics client application.
- - Service `quickbooks`: `Client Secret` of your QuickBooks client application.
- - Service `reddit_ads`: `Client Secret` of your Reddit Ads client application.
- - Service `salesforce`: `Client Secret` of your Salesforce client application.
- - Service `salesforce_sandbox`: `Client Secret` of your Salesforce client application.
- - Service `share_point`: `Client Secret` of your Microsoft client application.
- - Service `snapchat_ads`: `Client Secret` of your Snapchat Ads client application.
- - Service `spotify_ads`: `Client Secret` of your Ad Studio application.
- - Service `survey_monkey`: `Client Secret` of your SurveyMonkey client application.
- - Service `tiktok_ads`: `Client Secret` of your TikTok Ads client application.
- - Service `twitter`: `Client Secret` of your Twitter client application.
- - Service `twitter_ads`: `Client Secret` of your Twitter Ads client application.
- - Service `typeform`: The Typeform client secret.
- - Service `yahoo_gemini`: `Client Secret` of your Yahoo Gemini client application.
- - Service `youtube_analytics`: `Client Secret` of your YouTube client application.
- - Service `zoho_crm`: `Client Secret` of your Zoho client application.
-- `developer_token` (String) Field usage depends on `service` value:
- - Service `google_ads`: Your approved `Developer token` to connect to the Google Ads API.
-- `user_agent` (String) Field usage depends on `service` value:
- - Service `google_ads`: Your company's name in your Google Ads client application.
-
-
-
-
-### Nested Schema for `config`
-
-Optional:
-
-- `abs_connection_method` (String) Field usage depends on `service` value:
- - Service `adobe_analytics_data_feed`: Azure Blob Storage connection method
-- `abs_connection_string` (String, Sensitive) Field usage depends on `service` value:
- - Service `adobe_analytics_data_feed`: Azure Blob Storage connection string.
- - Service `braze`: Connection String
-- `abs_container_address` (String) Field usage depends on `service` value:
- - Service `adobe_analytics_data_feed`: Azure Blob Storage container address
-- `abs_container_name` (String) Field usage depends on `service` value:
- - Service `adobe_analytics_data_feed`: Azure Blob Storage container name.
- - Service `braze`: Container Name -- `abs_host_ip` (String) Field usage depends on `service` value: - - Service `adobe_analytics_data_feed`: Azure Blob Storage host IP -- `abs_host_user` (String) Field usage depends on `service` value: - - Service `adobe_analytics_data_feed`: Azure Blob Storage username -- `abs_prefix` (String) Field usage depends on `service` value: - - Service `braze`: Prefix -- `abs_public_key` (String) Field usage depends on `service` value: - - Service `adobe_analytics_data_feed`: Azure Blob Storage public key -- `academy_id` (String) Field usage depends on `service` value: - - Service `workramp`: Your WorkRamp academy ID. -- `access_id` (String) Field usage depends on `service` value: - - Service `planful`: Your Planful access ID. -- `access_key` (String, Sensitive) Field usage depends on `service` value: - - Service `gainsight_customer_success`: The access key for API authentication. - - Service `gongio`: Your Gongio Access key. - - Service `planful`: Your Planful access key. - - Service `retailnext`: Your RetailNext access key. -- `access_key_id` (String, Sensitive) Field usage depends on `service` value: - - Service `appsflyer`: Your AWS access key ID. - - Service `aws_cost_report`: Access Key ID - - Service `checkout`: Your Checkout.com access key ID. - - Service `s3`: Access Key ID - - Service `wasabi_cloud_storage`: Access Key ID -- `access_key_secret` (String, Sensitive) Field usage depends on `service` value: - - Service `aws_cost_report`: Access Key Secret - - Service `checkout`: Your Checkout.com access key secret. - - Service `gongio`: Your Gongio Access Key Secret. - - Service `s3`: Access Key Secret - - Service `wasabi_cloud_storage`: Access Key Secret -- `access_token` (String, Sensitive) Field usage depends on `service` value: - - Service `7shifts`: Your 7shifts access token. - - Service `attio`: Your Attio bearer token - - Service `big_commerce`: API access token of your store. - - Service `bitly`: Your Bitly access token. - - Service `calabrio`: Your Calabrio access token. - - Service `coupa`: Your Coupa access token fetched using client_id and client_secret - - Service `deputy`: Your Deputy API access token. - - Service `getfeedback`: Your GetFeedback Access token. - - Service `gocardless`: Your GoCardless API token. - - Service `ironclad`: Your Ironclad access token. - - Service `kustomer`: Your Kustomer API key. - - Service `lattice`: Your Lattice API access token. - - Service `launchdarkly`: Your LaunchDarkly access token. - - Service `nylas`: Your Nylas access_token. - - Service `planhat`: Your Planhat access token. - - Service `rollbar`: Your Rollbar Access Token. - - Service `samsara`: Your Samsara API token. - - Service `slab`: Your Slab API key. - - Service `talkwalker`: Your Talkwalker access token. - - Service `workable`: Your Workable Access Token. - - Service `workramp`: Your WorkRamp access token. -- `access_type` (String) Field usage depends on `service` value: - - Service `share_point`: Access Type -- `account` (String) Field usage depends on `service` value: - - Service `netsuite_suiteanalytics`: The NetSuite Account ID. - - Service `ordway`: Your Ordway account type. -- `account_access_token` (String, Sensitive) Field usage depends on `service` value: - - Service `rollbar`: Your Rollbar account access token. -- `account_id` (String) Field usage depends on `service` value: - - Service `appcues`: Your Appcues Account ID. - - Service `brightcove`: Your Brightcove account ID. - - Service `cin7core`: Your Cin7 Core account ID. 
- - Service `dear`: Your Dear Account ID.
- - Service `harvest`: Your Harvest Account ID.
- - Service `optimizely`: Your Optimizely account ID.
- - Service `udemy_business`: Your Udemy Business account ID.
-- `account_ids` (Set of String) Field usage depends on `service` value:
- - Service `taboola`: Specific Account IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`.
-- `account_key` (String, Sensitive) Field usage depends on `service` value:
- - Service `cosmos`: The read-only primary or secondary account key for the database account. Required for the `ACCOUNT_KEY` data access method.
- - Service `simplesat`: Your Simplesat account key.
-- `account_name` (String) Field usage depends on `service` value:
- - Service `eventsforce`: Your Eventsforce account name.
- - Service `freshdesk_contact_center`: Your Freshdesk Contact Center account name.
- - Service `happyfox`: Your HappyFox account name.
- - Service `maxio_saasoptics`: Your Maxio SaaSOptics account name.
- - Service `talkdesk`: Your Talkdesk Account Name.
- - Service `udemy_business`: Your Udemy Business account name.
-- `account_plan` (String) Field usage depends on `service` value:
- - Service `tymeshift`: Your Tymeshift account plan.
-- `account_region` (String) Field usage depends on `service` value:
- - Service `iterable`: If your Iterable account URL starts with `https://app.eu.iterable.com` then provide `EU` else `US`
-- `account_sid` (String) Field usage depends on `service` value:
- - Service `fone_dynamics`: Your Fone Dynamics account SID.
-- `account_sync_mode` (String) Field usage depends on `service` value:
- - Service `itunes_connect`: Account Sync Mode
-- `account_token` (String, Sensitive) Field usage depends on `service` value:
- - Service `konnect_insights`: Your Konnect Insights Account Token.
-- `account_type` (String) Field usage depends on `service` value:
- - Service `freightview`: Your Freightview Account Type.
-- `accounts` (Set of String) Field usage depends on `service` value:
- - Service `bingads`: Specific accounts to sync. Must be populated if `syncMode` is set to `SpecificAccounts`.
- - Service `facebook`: The list of accounts whose data the connector will sync.
- - Service `facebook_ad_account`: Specific accounts to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`.
- - Service `facebook_ads`: The list of accounts whose data the connector will sync.
- - Service `google_ads`: The list of Account IDs to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`.
- - Service `google_analytics`: The list of specific Account IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`.
- - Service `google_analytics_4`: The list of specific Account IDs to sync. Must be populated if `sync_mode` is set to `SPECIFIC_ACCOUNTS`.
- - Service `google_analytics_mcf`: Specific Account IDs to sync. Must be populated if `sync_mode` is set to `SPECIFIC_ACCOUNTS`.
- - Service `google_search_ads_360`: Specific accounts to sync. Must be populated if `accountsSyncMode` is set to `SPECIFIC_ACCOUNTS`.
- - Service `instagram_business`: Specific accounts to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`.
- - Service `itunes_connect`: Accounts
- - Service `linkedin_ads`: Specific Account IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`.
- - Service `spotify_ads`: The list of Ad Account IDs to sync. Must be populated if `sync_mode` is set to `SPECIFIC_ACCOUNTS`.
- - Service `tiktok_ads`: Specific accounts to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`.
- - Service `twilio`: Specific Accounts to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`.
- - Service `twitter`: Specific accounts to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`.
- - Service `twitter_ads`: Specific Accounts to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`.
-- `accounts_reddit_ads` (Block Set) (see [below for nested schema](#nestedblock--config--accounts_reddit_ads))
-- `accounts_sync_mode` (String) Field usage depends on `service` value:
- - Service `google_search_ads_360`: Whether to sync all accounts or specific ones.
-- `action_breakdowns` (Set of String) Field usage depends on `service` value:
- - Service `facebook`: The list of action_breakdowns the connector will sync. [Possible action_breakdowns values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#actionbreakdowns).
-- `action_report_time` (String) Field usage depends on `service` value:
- - Service `facebook`: The report time of action stats. [Possible action_report time values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#actionreporttime).
-- `ad_analytics` (String) Field usage depends on `service` value:
- - Service `linkedin_ads`: Whether to sync all analytic reports or specific ones. Default value: `AllReports`
-- `ad_unit_view` (String) Field usage depends on `service` value:
- - Service `double_click_publishers`: Ad unit view for the report.
-- `admin_api_key` (String, Sensitive) Field usage depends on `service` value:
- - Service `splitio`: Your Split admin API key.
-- `adobe_analytics_configurations` (Block Set) (see [below for nested schema](#nestedblock--config--adobe_analytics_configurations))
-- `advertisables` (Set of String) Field usage depends on `service` value:
- - Service `adroll`: Specific advertisables to sync. Must be populated if `sync_mode` is set to `SpecificAdvertisables`.
-- `advertisers` (Set of String) Field usage depends on `service` value:
- - Service `google_display_and_video_360`: The list of advertisers to include in a sync. This parameter only takes effect when `config_method` is set to `CREATE_NEW`.
- - Service `google_search_ads_360`: Specific advertisers to sync. Must be populated if `advertisersSyncMode` is set to `SPECIFIC_ADVERTISERS`.
- - Service `pinterest_ads`: Specific Advertisers to sync. Must be populated if `sync_mode` is set to `SpecificAdvertisers`.
-- `advertisers_id` (Set of String) Field usage depends on `service` value:
- - Service `yahoo_gemini`: Specific Advertiser IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`.
-- `advertisers_sync_mode` (String) Field usage depends on `service` value:
- - Service `google_search_ads_360`: Whether to sync all or specific advertisers.
-- `advertisers_with_seat` (Set of String) Field usage depends on `service` value:
- - Service `yahoo_dsp`: Specific Advertisers to sync. Must be populated if `sync_mode_advertiser` is set to `SPECIFIC_ADVERTISERS`. Pay attention to the format: `AdvertiserId:SeatId`
-- `agent_config_method` (String)
-- `agent_host` (String) Field usage depends on `service` value:
- - Service `db2i_hva`: The agent host.
- - Service `db2i_sap_hva`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
- - Service `hana_sap_hva_b1`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
- - Service `hana_sap_hva_ecc`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
- - Service `hana_sap_hva_ecc_netweaver`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
- - Service `hana_sap_hva_s4`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
- - Service `hana_sap_hva_s4_netweaver`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
- - Service `oracle_hva`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
- - Service `oracle_sap_hva`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
- - Service `oracle_sap_hva_netweaver`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
- - Service `sql_server_hva`: The host address of the machine running the agent. Often the same as the DB host.
- - Service `sql_server_sap_ecc_hva`: The host address of the machine running the agent. Often the same as the DB host.
-- `agent_ora_home` (String) Field usage depends on `service` value:
- - Service `oracle_hva`: The home directory of the Oracle database.
- - Service `oracle_sap_hva`: The home directory of the Oracle database.
-- `agent_password` (String, Sensitive) Field usage depends on `service` value:
- - Service `db2i_hva`: The agent password.
- - Service `db2i_sap_hva`: The agent user's password. It must have a minimum length of 10 characters.
- - Service `hana_sap_hva_b1`: The agent user's password. It must have a minimum length of 10 characters.
- - Service `hana_sap_hva_ecc`: The agent user's password. It must have a minimum length of 10 characters.
- - Service `hana_sap_hva_ecc_netweaver`: The agent user's password. It must have a minimum length of 10 characters.
- - Service `hana_sap_hva_s4`: The agent user's password. It must have a minimum length of 10 characters.
- - Service `hana_sap_hva_s4_netweaver`: The agent user's password. It must have a minimum length of 10 characters.
- - Service `oracle_hva`: The agent user's password. It must have a minimum length of 10 characters.
- - Service `oracle_sap_hva`: The agent user's password. It must have a minimum length of 10 characters.
- - Service `oracle_sap_hva_netweaver`: The agent user's password. It must have a minimum length of 10 characters.
- - Service `sql_server_hva`: The password for the agent user.
- - Service `sql_server_sap_ecc_hva`: The password for the agent user.
-- `agent_port` (Number) Field usage depends on `service` value:
- - Service `db2i_hva`: The agent port.
- - Service `db2i_sap_hva`: The port number of the agent.
- - Service `hana_sap_hva_b1`: The port number of the agent.
- - Service `hana_sap_hva_ecc`: The port number of the agent.
- - Service `hana_sap_hva_ecc_netweaver`: The port number of the agent.
- - Service `hana_sap_hva_s4`: The port number of the agent.
- - Service `hana_sap_hva_s4_netweaver`: The port number of the agent.
- - Service `oracle_hva`: The port number of the agent. - - Service `oracle_sap_hva`: The port number of the agent. - - Service `oracle_sap_hva_netweaver`: The port number of the agent. - - Service `sql_server_hva`: The port that the agent has open for Fivetran's connection. Default value is 4343. - - Service `sql_server_sap_ecc_hva`: The port that the agent has open for Fivetran's connection. Default value is 4343. -- `agent_public_cert` (String) Field usage depends on `service` value: - - Service `db2i_hva`: The public certificate for the agent. - - Service `db2i_sap_hva`: The agent public certificate. - - Service `hana_sap_hva_b1`: The agent public certificate. - - Service `hana_sap_hva_ecc`: The agent public certificate. - - Service `hana_sap_hva_ecc_netweaver`: The agent public certificate. - - Service `hana_sap_hva_s4`: The agent public certificate. - - Service `hana_sap_hva_s4_netweaver`: The agent public certificate. - - Service `oracle_hva`: The agent public certificate. - - Service `oracle_sap_hva`: The agent public certificate. - - Service `oracle_sap_hva_netweaver`: The agent public certificate. - - Service `sql_server_hva`: The public certificate generated by the agent. - - Service `sql_server_sap_ecc_hva`: The public certificate generated by the agent. -- `agent_user` (String) Field usage depends on `service` value: - - Service `db2i_hva`: The agent user name. - - Service `db2i_sap_hva`: The agent's user. - - Service `hana_sap_hva_b1`: The agent's user. - - Service `hana_sap_hva_ecc`: The agent's user. - - Service `hana_sap_hva_ecc_netweaver`: The agent's user. - - Service `hana_sap_hva_s4`: The agent's user. - - Service `hana_sap_hva_s4_netweaver`: The agent's user. - - Service `oracle_hva`: The agent's user. - - Service `oracle_sap_hva`: The agent's user. - - Service `oracle_sap_hva_netweaver`: The agent's user. - - Service `sql_server_hva`: The agent's username. - - Service `sql_server_sap_ecc_hva`: The agent's username. -- `aggregation` (String) Field usage depends on `service` value: - - Service `facebook`: Options to select aggregation duration. [Possible aggregation values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#aggregation). -- `agreement_grant_token` (String, Sensitive) Field usage depends on `service` value: - - Service `economic`: Your E-conomic X-AgreementGrantToken goes here. -- `always_encrypted` (Boolean) Field usage depends on `service` value: - - Service `aurora`: Require TLS through Tunnel - - Service `aurora_postgres`: Require TLS through Tunnel - - Service `azure_cosmos_for_mongo`: Require TLS - - Service `azure_postgres`: Require TLS through Tunnel - - Service `azure_sql_db`: Require TLS through Tunnel. - - Service `azure_sql_managed_db`: Require TLS. - - Service `clarity`: Require TLS through Tunnel. - - Service `cockroachdb`: Require TLS - - Service `db2i_hva`: Require TLS through Tunnel - - Service `db2i_sap_hva`: Require TLS through Tunnel - - Service `documentdb`: Require TLS encryption. - - Service `dynamics_365_fo`: Require TLS through Tunnel. - - Service `ehr`: Require TLS through Tunnel. - - Service `elastic_cloud`: Default value: true. Set to false if TLS is not required when using an SSH tunnel. - - Service `es_self_hosted`: Default value: true. Set to false if TLS is not required when using an SSH tunnel. - - Service `google_cloud_mysql`: Require TLS through Tunnel - - Service `google_cloud_postgresql`: Require TLS through Tunnel - - Service `google_cloud_sqlserver`: Require TLS. 
- - Service `heroku_postgres`: Require TLS through Tunnel - - Service `magento_mysql`: Require TLS through Tunnel - - Service `magento_mysql_rds`: Require TLS through Tunnel - - Service `maria`: Require TLS through Tunnel - - Service `maria_azure`: Require TLS through Tunnel - - Service `maria_rds`: Require TLS through Tunnel - - Service `mongo`: Require TLS - - Service `mongo_sharded`: Require TLS through Tunnel - - Service `mysql`: Require TLS through Tunnel - - Service `mysql_azure`: Require TLS through Tunnel - - Service `mysql_rds`: Require TLS through Tunnel - - Service `opendistro`: Default value: true. Set to false if TLS is not required when using an SSH tunnel. - - Service `opensearch`: Default value: true. Set to false if TLS is not required when using an SSH tunnel. - - Service `oracle`: Require TLS through Tunnel - - Service `oracle_ebs`: Require TLS through Tunnel - - Service `oracle_hva`: Require TLS through Tunnel - - Service `oracle_rac`: Require TLS through Tunnel - - Service `oracle_rds`: Require TLS through Tunnel - - Service `oracle_sap_hva`: Require TLS through Tunnel - - Service `oracle_sap_hva_netweaver`: Require TLS. - - Service `postgres`: Require TLS through Tunnel - - Service `postgres_rds`: Require TLS through Tunnel - - Service `sql_server`: Require TLS. - - Service `sql_server_hva`: Require TLS. - - Service `sql_server_rds`: Require TLS. - - Service `sql_server_sap_ecc_hva`: Require TLS. -- `api` (String) Field usage depends on `service` value: - - Service `freshsuccess`: Set this parameter to `api`. -- `api_access_token` (String, Sensitive) Field usage depends on `service` value: - - Service `shopify`: API access token of your custom app. - - Service `square`: The Square API access token of your application. -- `api_environment` (String) Field usage depends on `service` value: - - Service `afterpay`: Your Afterpay API environment. -- `api_id` (String) Field usage depends on `service` value: - - Service `aircall`: Your Aircall API ID. -- `api_key` (String, Sensitive) Field usage depends on `service` value: - - Service `15five`: Your 15five API key. - - Service `360learning`: Your 360Learning API Key. - - Service `6sense`: Your 6sense API Key. - - Service `activecampaign`: Your ActiveCampaign API key. - - Service `affinity`: Your Affinity API key. - - Service `airtable`: API key of the Airtable account. - - Service `algolia`: Your Algolia API key. - - Service `anvyl`: Your Anvyl API key. - - Service `appcues`: Your Appcues API key. - - Service `assembled`: Your Assembled API key. - - Service `atlassian_jira_align`: Your Jira Align API key. - - Service `atlassian_ops_genie`: Your Opsgenie API key - - Service `attentive`: Your Attentive API key. - - Service `aumni`: Your Aumni API key. - - Service `avantlink`: Your AvantLink API key. - - Service `ballotready`: Your BallotReady API token. - - Service `bamboohr`: Your API Key. - - Service `bazaarvoice`: Your Bazaarvoice API key. - - Service `betterworks`: Your Betterworks API key. - - Service `bizzabo`: Your Bizzabo API key. - - Service `brave_ads`: Your Brave Ads API key - - Service `braze`: Your Braze API Key. - - Service `brevo`: Your Brevo API key. - - Service `bubble`: Your Bubble API token. - - Service `buildium`: Your Buildium private API key. - - Service `callrail`: Your CallRail API key. - - Service `campaignmonitor`: Your Campaign Monitor API key. - - Service `canny`: Your Canny API key. - - Service `chargebee_product_catalog_1`: Your Chargebee Product Catalog 1 API key. 
- - Service `chargebee_product_catalog_2`: Your Chargebee API key. - - Service `chartmogul`: Your ChartMogul API key. - - Service `chorusai`: Your Chorus API key. - - Service `churnkey`: Your Churnkey API Key. - - Service `churnzero`: Your ChurnZero API key. - - Service `cimis`: Your Cimis API key. - - Service `circleci`: Your CircleCI API Key. - - Service `clickup`: Your ClickUp API key. - - Service `close`: Your Close API key. - - Service `cloudbeds`: Your Cloudbeds API key. - - Service `clubspeed`: Your Clubspeed API key. - - Service `coassemble`: Your Coassemble API key. - - Service `codefresh`: Your Codefresh API Key. - - Service `column`: Your Column API key. - - Service `concord`: Your Concord API key. - - Service `confluent_cloud`: API Key - - Service `contrast_security`: Your Contrast Security API Key. - - Service `copper`: Your Copper API key. - - Service `coupa`: Your Coupa API key. - - Service `datadog`: Your Datadog API key. - - Service `dbt_cloud`: Your dbt Cloud service token. - - Service `dcl_logistics`: Your DCL Logistics API key. - - Service `delighted`: API Key for your Delighted account - - Service `destini`: Your Destini API Key. - - Service `donus`: Your Donus API key. - - Service `doorloop`: Your DoorLoop API key. - - Service `drata`: Your Drata API Key. - - Service `dropbox_sign`: Your Dropbox Sign API key. - - Service `duoplane`: Your Duoplane API key. - - Service `easypost`: Your EasyPost API Key. - - Service `electronic_tenant_solutions`: Your Electronic Tenant Solutions API key. - - Service `eventsforce`: Your Eventsforce API secret key. - - Service `everhour`: Your Everhour API Token. - - Service `factorial`: Your Factorial API key. - - Service `firehydrant`: Your FireHydrant API key. - - Service `float`: Your Float API key. - - Service `forj_community`: Your Forj Community API key. - - Service `fourkites`: Your FourKites API key. - - Service `freightview`: Your Freightview API key. - - Service `freshdesk`: Your Freshdesk API Key. - - Service `freshdesk_contact_center`: Your Freshdesk Contact Center API key. - - Service `freshsales`: Your Freshsales API key. - - Service `freshservice`: Your Freshservice API Key. - - Service `freshsuccess`: Your Freshsuccess API key. - - Service `freshteam`: Your Freshteam API key. - - Service `friendbuy`: Your Friendbuy API key. - - Service `fullstory`: Your Fullstory API key. - - Service `gainsight_product_experience`: Your Gainsight Product Experience API key. - - Service `gem`: Your Gem API key. - - Service `gorgias`: Your Gorgias API key. - - Service `greenhouse`: Your Greenhouse API key. - - Service `grepsr`: Your Grepsr API Key. - - Service `grin`: Your Grin API key. - - Service `happyfox`: Your HappyFox API key. - - Service `height`: Your Height API key. - - Service `helpshift`: Your Helpshift API Key. - - Service `incidentio`: Your incident.io API key. - - Service `infobip`: Your Infobip API key. - - Service `insightly`: Your Insightly API key. - - Service `integrate`: Your Integrate API key. - - Service `invoiced`: Your Invoiced API key. - - Service `iterable`: Your Iterable API key. - - Service `ivanti`: Your Ivanti API Key. - - Service `jotform`: Your Jotform API key. - - Service `justcall`: Your JustCall API key. - - Service `katana`: Your Katana API key. - - Service `kevel`: Your Kevel API key. - - Service `keypay`: Your KeyPay API key. - - Service `kisi`: Your Kisi API key. - - Service `klaviyo`: Your Klaviyo API key. - - Service `learnupon`: Your Learnupon API key. - - Service `lemlist`: Your Lemlist API key. 
- - Service `lever`: Your Lever API key.
- - Service `liftoff`: Your Liftoff API key.
- - Service `linear`: Your Linear API key.
- - Service `linksquares`: Your LinkSquares API key.
- - Service `lob`: Your Lob API key.
- - Service `loop`: Your Loop API key.
- - Service `luma`: Your Luma API key.
- - Service `mailgun`: Your Mailgun API key.
- - Service `mambu`: Your Mambu API key.
- - Service `mandrill`: Your Mandrill API key.
- - Service `maxio_chargify`: Enter your API key.
- - Service `messagebird`: Your MessageBird API key.
- - Service `mountain`: Your MNTN API key.
- - Service `myosh`: Your myosh API key.
- - Service `okendo`: Your Okendo API key.
- - Service `ometria`: Your Ometria API Key.
- - Service `oncehub`: Your OnceHub API key.
- - Service `ordway`: Your Ordway API key.
- - Service `ortto`: Your Ortto API key.
- - Service `pagerduty`: Your PagerDuty API key.
- - Service `papershift`: Your Papershift API Key
- - Service `partnerize`: Your Partnerize user API key.
- - Service `persona`: Your Persona API key.
- - Service `picqer`: Your Picqer API key.
- - Service `pinpoint`: Your Pinpoint API key.
- - Service `pipe17`: The Pipe17 API key.
- - Service `placerai`: Your Placer.ai API key.
- - Service `playvox`: Your Playvox API Key.
- - Service `posthog`: Your PostHog API key.
- - Service `prive`: Your Prive API key.
- - Service `qualaroo`: Your Qualaroo API Key.
- - Service `quorum`: Your Quorum API key.
- - Service `rebound_returns`: Your ReBound Returns API key.
- - Service `recurly`: The Recurly API key.
- - Service `replyio`: Your Reply API key.
- - Service `revenuecat`: Your RevenueCat API key.
- - Service `reviewsio`: Your REVIEWS.io API key.
- - Service `revops`: Your RevOps bearer token.
- - Service `ricochet360`: Your Ricochet360 API key.
- - Service `ringover`: Your Ringover API key.
- - Service `rippling`: Your Rippling API key.
- - Service `rocketlane`: Your Rocketlane API key.
- - Service `rootly`: Your Rootly API key.
- - Service `safebase`: Your SafeBase API key.
- - Service `sage_hr`: Your Sage HR API key.
- - Service `sailthru`: The Sailthru API key.
- - Service `salsify`: Your Salsify API Key.
- - Service `security_journey`: Your Security Journey API key.
- - Service `sendgrid`: The SendGrid API key.
- - Service `sendinblue`: Your Sendinblue API key.
- - Service `shortcut`: Your Shortcut API token.
- - Service `shortio`: Your Short.io API key.
- - Service `simplesat`: Your Simplesat API key.
- - Service `sistrix`: Your SISTRIX API key.
- - Service `skilljar`: Your Skilljar API key.
- - Service `smartwaiver`: Your Smartwaiver API key.
- - Service `snyk`: Your Snyk API key.
- - Service `sonarqube`: Your Sonarqube API key.
- - Service `sparkpost`: Your SparkPost API key.
- - Service `squarespace`: Your Squarespace API key.
- - Service `stackadapt`: Your StackAdapt API key.
- - Service `statuspage`: Your Statuspage API Key.
- - Service `stripe`: Restricted API key
- - Service `stripe_test`: Restricted API key
- - Service `subscript`: Your Subscript API key.
- - Service `survicate`: Your Survicate API Key.
- - Service `teads`: Your Teads API key.
- - Service `teamtailor`: Your Teamtailor API key.
- - Service `testrail`: Your TestRail API key.
- - Service `ticket_tailor`: Your Ticket Tailor API key.
- - Service `transcend`: Your Transcend API Key.
- - Service `trello`: Your Trello API key.
- - Service `uppromote`: Your UpPromote API key.
- - Service `veeqo`: Your Veeqo API key.
- - Service `visit_by_ges`: Your Visit by GES API key.
- - Service `vitally`: Your Vitally API key.
- - Service `vonage`: Your Vonage API Key.
- - Service `vts`: Your VTS API key.
- - Service `webconnex`: Your Webconnex API key.
- - Service `xsolla`: Your Xsolla API key.
- - Service `yougov_sport`: Your Yougov Sport API key.
- - Service `zingtree`: Your Zingtree API key.
-- `api_key_api_secret` (String, Sensitive) Field usage depends on `service` value:
- - Service `revel`: Your Revel Systems API Key and API Secret.
-- `api_keys` (Set of String, Sensitive) Field usage depends on `service` value:
- - Service `mandrill`: Comma-separated list of API keys. Required if `use_api_keys` is set to `true`.
-- `api_password` (String, Sensitive) Field usage depends on `service` value:
- - Service `duoplane`: Your Duoplane API password.
-- `api_quota` (Number) Field usage depends on `service` value:
- - Service `marketo`: Allowed number of API requests to the Marketo instance per day; the default value is 10000.
-- `api_requests_per_minute` (Number) Field usage depends on `service` value:
- - Service `qualtrics`: Allowed number of API requests to Qualtrics per minute; the default value is 2000. The maximum allowed number is 3000, because a brand may make up to 3000 API requests per minute across all of its API calls.
-- `api_secret` (String, Sensitive) Field usage depends on `service` value:
- - Service `confluent_cloud`: API Secret
- - Service `forj_community`: Your Forj Community API secret.
- - Service `friendbuy`: Your Friendbuy API secret.
- - Service `justcall`: Your JustCall API Secret.
- - Service `liftoff`: Your Liftoff API secret.
- - Service `mixpanel`: Mixpanel API Secret.
- - Service `qualaroo`: Your Qualaroo API Secret.
- - Service `sailthru`: The Sailthru API secret.
- - Service `vonage`: Your Vonage API Secret.
-- `api_secret_key` (String, Sensitive) Field usage depends on `service` value:
- - Service `alchemer`: Your Alchemer API Secret key.
-- `api_server` (String) Field usage depends on `service` value:
- - Service `sigma_computing_source`: Your Sigma Computing API server.
-- `api_token` (String, Sensitive) Field usage depends on `service` value:
- - Service `aha`: Your Aha! API key.
- - Service `aircall`: Your Aircall API Token.
- - Service `appsflyer`: API Token for AppsFlyer's PULL API.
- - Service `awin`: Your Awin API Token.
- - Service `brex`: Your Brex API token
- - Service `buildkite`: Your Buildkite API token.
- - Service `buzzsprout`: Your Buzzsprout API token.
- - Service `centra`: Your Centra API Token.
- - Service `chameleon`: Your Chameleon API token.
- - Service `clari`: Your Clari API token.
- - Service `confluence`: The Confluence API token.
- - Service `dixa`: Your Dixa API token.
- - Service `drip`: Your Drip API Token.
- - Service `factbird`: Your Factbird API token.
- - Service `fone_dynamics`: Your Fone Dynamics API token.
- - Service `fountain`: Your Fountain API token.
- - Service `g2`: Your G2 API token.
- - Service `gladly`: Your Gladly API Token.
- - Service `hibob`: Your Hibob API token.
- - Service `kandji`: Your Kandji API token.
- - Service `livechat`: Your Livechat Access token.
- - Service `livechat_partner`: Your Livechat Partner API Token.
- - Service `maxio_saasoptics`: Your Maxio SaaSOptics API token.
- - Service `megaphone`: Your Megaphone API token.
- - Service `missive`: Your Missive API token.
- - Service `mixmax`: Mixmax API token.
- - Service `okta`: Your Okta API token.
- - Service `ordway`: Your Ordway API token.
- - Service `pipedrive`: (Optional) Your Pipedrive personal API token.
- - Service `pivotal_tracker`: Pivotal Tracker API token.
- - Service `postmark`: Your Postmark account API token.
- - Service `productive`: Your Productive API token.
- - Service `qualtrics`: API token of the Qualtrics account.
- - Service `rakutenadvertising`: Your Rakuten Advertising API token.
- - Service `recharge`: The Recharge API token.
- - Service `referralhero`: Your Referralhero API token.
- - Service `resource_management_by_smartsheet`: Your Resource Management by Smartsheet API token.
- - Service `retently`: Your Retently API token.
- - Service `rundeck`: Your Rundeck API token.
- - Service `safetyculture`: Your SafetyCulture API token.
- - Service `sensor_tower`: Your Sensor Tower API token.
- - Service `simplecast`: Your Simplecast API token.
- - Service `snyk`: Your Snyk API token.
- - Service `textus`: Your TextUs API token.
- - Service `toggl_track`: Your Toggl Track API token
- - Service `trello`: Your Trello API token.
- - Service `trisolute`: Your Trisolute API token.
- - Service `vwo`: Your VWO API token.
- - Service `web_scraper`: Your Web Scraper API token.
- - Service `zendesk`: Zendesk API tokens are auto-generated passwords in the Support admin interface.
- - Service `zendesk_sunshine`: Zendesk API tokens are auto-generated passwords in the Support admin interface.
-- `api_type` (String)
-- `api_url` (String) Field usage depends on `service` value:
- - Service `braze`: Your Braze API URL.
-- `api_usage` (String) Field usage depends on `service` value:
- - Service `zendesk`: Maximum Zendesk API usage allowed
-- `api_user_identifier` (String, Sensitive) Field usage depends on `service` value:
- - Service `shipnetwork`: Your ShipNetwork API user identifier.
-- `api_user_secret` (String, Sensitive) Field usage depends on `service` value:
- - Service `shipnetwork`: Your ShipNetwork API user secret.
-- `api_utilization_percentage` (String) Field usage depends on `service` value:
- - Service `kustomer`: API utilization percentage
-- `api_version` (String) Field usage depends on `service` value:
- - Service `pardot`: API Version
-- `app_id` (String, Sensitive) Field usage depends on `service` value:
- - Service `churnkey`: Your Churnkey App ID.
- - Service `open_exchange_rates`: Your Open Exchange Rates App ID.
-- `app_ids` (Set of String) Field usage depends on `service` value:
- - Service `pendo`: Specific App IDs to sync. Must be populated if `sync_mode` is set to `SpecificAppIds`.
-- `app_ids_appsflyer` (Block Set) (see [below for nested schema](#nestedblock--config--app_ids_appsflyer))
-- `app_key` (String, Sensitive) Field usage depends on `service` value:
- - Service `loopio`: Your Loopio App Key.
- - Service `servicetitan`: Your ServiceTitan app key.
- - Service `yotpo`: Your Yotpo App Key
-- `app_reference` (String) Field usage depends on `service` value:
- - Service `brightpearl`: Your Brightpearl app reference.
-- `app_secret_token` (String, Sensitive)
-- `app_specific_password` (String, Sensitive) Field usage depends on `service` value:
- - Service `itunes_connect`: Your app-specific password
-- `app_sync_mode` (String) Field usage depends on `service` value:
- - Service `itunes_connect`: Whether to sync all apps or specific apps.
-- `append_file_option` (String) Field usage depends on `service` value:
- - Service `aws_cost_report`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
- - Service `azure_blob_storage`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
- - Service `box`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
- - Service `dropbox`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
- - Service `ftp`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
- - Service `gcs`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
- - Service `google_drive`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
- - Service `kinesis`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
- - Service `s3`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
- - Service `sftp`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
- - Service `share_point`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
- - Service `wasabi_cloud_storage`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
-- `application_id` (String) Field usage depends on `service` value:
- - Service `algolia`: Your Algolia application ID.
-- `application_key` (String, Sensitive) Field usage depends on `service` value:
- - Service `cin7core`: Your Cin7 Core application key.
- - Service `datadog`: Your Datadog application key.
- - Service `dear`: Your Dear Application key.
- - Service `partnerize`: Your Partnerize user application key.
-- `apps` (Set of String) Field usage depends on `service` value:
- - Service `itunes_connect`: Specific apps to sync. Must be populated if `app_sync_mode` is set to `SpecificApps`.
-- `archive_log_format` (String) Field usage depends on `service` value:
- - Service `sql_server_hva`: Format for archive log file names
- - Service `sql_server_sap_ecc_hva`: Format for archive log file names
-- `archive_log_path` (String) Field usage depends on `service` value:
- - Service `sql_server_hva`: Directory where archive logs are located
- - Service `sql_server_sap_ecc_hva`: Directory where archive logs are located
-- `archive_pattern` (String) Field usage depends on `service` value:
- - Service `aws_cost_report`: Optional. Files inside of compressed archives with filenames matching this regular expression will be synced.
- - Service `azure_blob_storage`: Files inside of compressed archives with filenames matching this regular expression will be synced.
- - Service `box`: Files inside of compressed archives with filenames matching this regular expression will be synced.
- - Service `dropbox`: Files inside of compressed archives with filenames matching this regular expression will be synced.
- - Service `email`: Files inside of compressed archives with filenames matching this regular expression will be synced.
- - Service `ftp`: Files inside of compressed archives with filenames matching this regular expression will be synced.
- - Service `gcs`: Files inside of compressed archives with filenames matching this regular expression will be synced.
- - Service `google_drive`: Files inside of compressed archives with filenames matching this regular expression will be synced.
- - Service `kinesis`: Optional. Files inside of compressed archives with filenames matching this regular expression will be synced.
- - Service `s3`: Files inside compressed archives with filenames matching this regular expression will be synced.
- - Service `sftp`: Files inside of compressed archives with filenames matching this regular expression will be synced.
- - Service `share_point`: Files inside of compressed archives with filenames matching this regular expression will be synced.
- - Service `wasabi_cloud_storage`: Files inside compressed archives with filenames matching this regular expression will be synced.
-- `are_soap_credentials_provided` (Boolean) Field usage depends on `service` value:
- - Service `marketo`: Marketo SOAP credentials provided.
-- `asb_ip` (String) Field usage depends on `service` value:
- - Service `azure_service_bus`: The IP address or the URL of the ASB namespace.
-- `asm_option` (Boolean) Field usage depends on `service` value:
- - Service `oracle_hva`: Default value: `false`. Set to `true` if you're using ASM on a non-RAC instance.
- - Service `oracle_sap_hva`: Default value: `false`. Set to `true` if you are using ASM on a non-RAC instance.
-- `asm_oracle_home` (String) Field usage depends on `service` value:
- - Service `oracle_hva`: ASM Oracle Home path.
- - Service `oracle_sap_hva`: The Oracle ASM home directory.
-- `asm_password` (String, Sensitive) Field usage depends on `service` value:
- - Service `oracle_hva`: ASM password. Mandatory if `use_oracle_rac` or `asm_option` is set to `true`.
- - Service `oracle_sap_hva`: The ASM user's password. Mandatory if `use_oracle_rac` or `asm_option` is set to `true`.
-- `asm_tns` (String) Field usage depends on `service` value:
- - Service `oracle_hva`: ASM TNS.
- - Service `oracle_sap_hva`: ASM TNS.
-- `asm_user` (String) Field usage depends on `service` value:
- - Service `oracle_hva`: ASM user. Mandatory if `use_oracle_rac` or `asm_option` is set to `true`.
- - Service `oracle_sap_hva`: The ASM user. Mandatory if `use_oracle_rac` or `asm_option` is set to `true`.
-- `attribution_window` (String) Field usage depends on `service` value:
- - Service `amazon_ads`: Time period used to attribute conversions based on clicks.
-- `attribution_window_size` (String) Field usage depends on `service` value:
- - Service `tiktok_ads`: Rollback sync duration to capture conversions. Set this to your configured attribution window in TikTok Ads. The default value is 7 days.
-- `audience` (String, Sensitive) Field usage depends on `service` value:
- - Service `auth0`: Your Auth0 API audience.
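-The agent and ASM parameters above generally appear together in high-volume agent (HVA) connector configs. Below is a rough, non-authoritative sketch for an `oracle_hva` connector with ASM enabled on a non-RAC instance; every host name, credential, and TNS string is a made-up placeholder, and the usual database connection fields (host, port, user, and so on) are omitted:
-
-```hcl
-resource "fivetran_connector" "oracle_hva" {
-  group_id = "group_id_placeholder"
-  service  = "oracle_hva"
-
-  destination_schema {
-    prefix = "oracle_hva"
-  }
-
-  config {
-    # Agent connection details; the agent runs on the database host (placeholders).
-    agent_host        = "db.internal.example.com"
-    agent_port        = 4343
-    agent_user        = "fivetran_agent"
-    agent_password    = "agent-password-placeholder"
-    agent_public_cert = file("certs/agent_cert.pem")
-
-    # ASM settings; `asm_user`, `asm_password`, and `asm_tns` are mandatory
-    # once `asm_option` is true (see the field descriptions above).
-    asm_option   = true
-    asm_user     = "asm_user_placeholder"
-    asm_password = "asm-password-placeholder"
-    asm_tns      = "asm-tns-placeholder"
-  }
-}
-```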
-- `auth` (String) Field usage depends on `service` value:
- - Service `redshift_db`: Password-based authentication type
- - Service `snowflake_db`: Password-based or key-based authentication type
-- `auth_code` (String, Sensitive) Field usage depends on `service` value:
- - Service `happyfox`: Your HappyFox auth code.
-- `auth_environment` (String) Field usage depends on `service` value:
- - Service `younium`: Your Younium auth environment.
-- `auth_method` (String) Field usage depends on `service` value:
- - Service `azure_sql_db`: Authentication Method.
- - Service `azure_sql_managed_db`: Authentication Method.
- - Service `webhooks`: The authentication mechanism you want to use.
-- `auth_mode` (String) Field usage depends on `service` value:
- - Service `anaplan`: The Anaplan authentication method.
- - Service `concur`: The Authentication Mode used by SAP Concur. It can be either the PasswordGrant or the CompanyLevel auth mode.
- - Service `github`: Authorization type.
-- `auth_token` (String, Sensitive) Field usage depends on `service` value:
- - Service `zonka_feedback`: Your Zonka Feedback auth token.
-- `auth_type` (String) Field usage depends on `service` value:
- - Service `airtable`: Type of authentication being used by the connector
- - Service `aws_cost_report`: Access approach
- - Service `azure_service_bus`: The authentication mode to access the topic
- - Service `dynamics_365`: Authentication mechanism. Either `OAUTH2` or `SERVICE_PRINCIPAL`. Default value: `OAUTH2`
- - Service `gcs`: Authorization type. Required for storage bucket authentication.
- - Service `google_sheets`: The `OAuth` value must be specified for this type of authorization.
- - Service `jira`: Authorization type.
- - Service `mixpanel`: Authentication Method
- - Service `pardot`: Authenticate using OAuth or HTTP Basic
- - Service `qualtrics`: Type of authentication being used by the connector
- - Service `s3`: Access approach
- - Service `wasabi_cloud_storage`: The Wasabi Cloud Storage Access approach. Required for connector creation. Default value: `ACCESS_KEY`.
-- `authentication_method` (String) Field usage depends on `service` value:
- - Service `adobe_analytics`: Authentication Method
- - Service `elastic_cloud`: The authentication method used to connect to your cluster.
- - Service `es_self_hosted`: The authentication method used to connect to your cluster.
- - Service `opendistro`: The authentication method used to connect to your cluster.
- - Service `opensearch`: The authentication method used to connect to your cluster.
-- `aws_region_code` (String) Field usage depends on `service` value:
- - Service `dynamodb`: The AWS region code for the DynamoDB instance, e.g. `us-east-1`.
-- `backint_configuration_path` (String)
-- `backint_executable_path` (String)
-- `base_currency` (String) Field usage depends on `service` value:
- - Service `open_exchange_rates`: Your Open Exchange Rates Base Currency.
-- `base_domain` (String) Field usage depends on `service` value:
- - Service `freshteam`: Your company's Freshteam base domain name (usually **company**.freshteam.com).
-- `base_id` (String) Field usage depends on `service` value:
- - Service `airtable`: ID of the base in Airtable
-- `base_url` (String) Field usage depends on `service` value:
- - Service `aha`: Your Aha! subdomain.
- - Service `billing_platform`: Your BillingPlatform subdomain.
- - Service `boostr`: Your Boostr base URL.
- - Service `brex`: Your Brex Base URL
- - Service `centra`: Your Centra Base URL.
- - Service `culture_amp`: Your Culture Amp base URL.
- - Service `financial_force`: (Optional) The custom Salesforce domain. Make sure that the `base_url` starts with `https://`.
- - Service `freshsales`: Your Freshsales product.
- - Service `gongio`: Your Gong API Base URL.
- - Service `ironclad`: Your Ironclad base URL.
- - Service `jotform`: Your Jotform base URL.
- - Service `mailgun`: Your Mailgun base URL.
- - Service `ortto`: Your Ortto base URL. Possible values: `api`, `api.au`, `api.eu`.
- - Service `prisma_cloud`: Your Prisma Cloud admin console URL.
- - Service `salesforce`: (Optional) The custom Salesforce domain. Make sure that the `base_url` starts with `https://`.
- - Service `salesforce_sandbox`: (Optional) The custom Salesforce domain. Make sure that the `base_url` starts with `https://`.
- - Service `veevavault`: Your Veeva Vault base URL.
- - Service `vitally`: Your Vitally base URL.
-- `bearer_token` (String, Sensitive) Field usage depends on `service` value:
- - Service `ada`: Your Ada API Access Token.
- - Service `crowddev`: Your crowd.dev Auth Token.
- - Service `customerio`: Your Customer.io App API Key.
- - Service `freshchat`: Your Freshchat API Token.
- - Service `hopin`: Your Hopin API key.
- - Service `orbit`: Your Orbit API Token.
- - Service `productboard`: Your Productboard API key.
- - Service `smarthr`: Your SmartHR access token.
- - Service `sprout`: Your Sprout Social API Access Token.
- - Service `zenefits`: Your Zenefits bearer token.
-- `blob_sas_url` (String, Sensitive) Field usage depends on `service` value:
- - Service `webhooks`: The blob SAS URL of your Azure container. Required if `bucket_service` is set to `AZURE`.
-- `blockchain` (String) Field usage depends on `service` value:
- - Service `rarible`: Your Rarible Blockchain.
-- `brand_id` (String) Field usage depends on `service` value:
- - Service `oracle_moat_analytics`: Your Oracle Moat Analytics Brand ID.
-- `breakdowns` (Set of String) Field usage depends on `service` value:
- - Service `facebook`: The list of breakdowns the connector will sync. [Possible breakdowns values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#breakdowns).
-- `bucket` (String) Field usage depends on `service` value:
- - Service `appsflyer`: Customer S3 Bucket
- - Service `aws_cloudtrail`: The AWS bucket name which is configured for AWS CloudTrail.
- - Service `aws_cost_report`: The S3 bucket name with the AWS Cost and Usage Report
- - Service `aws_inventory`: The AWS bucket name that is configured for AWS Config.
- - Service `aws_lambda`: S3 Bucket
- - Service `cloudfront`: The bucket name for CloudFront.
- - Service `gcs`: The name of the GCS bucket.
- - Service `google_play`: The Google Cloud Storage source bucket.
- - Service `heap`: The S3 bucket name.
- - Service `kinesis`: The name of the Kinesis bucket.
- - Service `s3`: The S3 bucket name. Required for connector creation.
- - Service `segment`: The name of the Segment bucket. Must be populated if `sync_type` is set to `S3`.
- - Service `wasabi_cloud_storage`: The Wasabi Cloud Storage bucket name. Required for connector creation.
-- `bucket_name` (String) Field usage depends on `service` value:
- - Service `adjust`: Your AWS S3 or GCS bucket.
- - Service `google_analytics_360`: The name of the bucket.
- - Service `google_analytics_4_export`: The name of the bucket.
-- `bucket_service` (String) Field usage depends on `service` value:
- - Service `webhooks`: Whether to store the events in Fivetran's container service or your S3 bucket. Default value: `Fivetran`.
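-To tie the file-source fields together: `bucket` names the source location, while `archive_pattern` (described earlier) restricts which compressed archives are unpacked and synced. A hedged sketch for an `s3` connector follows; the group ID, bucket name, schema, and table are placeholders, and the IAM or access-key fields needed for real authentication are left out:
-
-```hcl
-resource "fivetran_connector" "s3_logs" {
-  group_id = "group_id_placeholder"
-  service  = "s3"
-
-  destination_schema {
-    name  = "s3_logs"
-    table = "events"
-  }
-
-  config {
-    bucket          = "my-company-logs" # placeholder S3 bucket name
-    archive_pattern = ".*\\.zip$"       # sync only files found inside matching .zip archives
-  }
-}
-```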
-- `business_accounts` (Set of String) Field usage depends on `service` value:
- - Service `reddit_ads`: Specific Accounts to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`.
-- `business_id` (String, Sensitive) Field usage depends on `service` value:
- - Service `birdeye`: Your Birdeye Business ID.
-- `business_unit_id` (String) Field usage depends on `service` value:
- - Service `pardot`: Business Unit ID
-- `catalog` (String) Field usage depends on `service` value:
- - Service `databricks_db`: Catalog to sync
-- `certificate` (String, Sensitive) Field usage depends on `service` value:
- - Service `anaplan`: The contents of your PEM certificate file. Must be populated if `auth_mode` is set to `Certificate`.
- - Service `qualtrics`: Your Client Certificate
-- `click_attribution_window` (String) Field usage depends on `service` value:
- - Service `facebook`: Time period to attribute conversions based on clicks. [Possible click_attribution_window values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#clickattributionwindow).
- - Service `pinterest_ads`: The number of days to use as the conversion attribution window for a 'click' action.
-- `client` (String)
-- `client_cert` (String, Sensitive) Field usage depends on `service` value:
- - Service `apache_kafka`: Kafka client certificate.
- - Service `heroku_kafka`: Heroku Kafka client certificate. Required for `TLS` security protocol.
-- `client_cert_key` (String, Sensitive) Field usage depends on `service` value:
- - Service `apache_kafka`: Kafka client certificate key.
- - Service `heroku_kafka`: Heroku Kafka client certificate key. Required for `TLS` security protocol.
-- `client_host` (String) Field usage depends on `service` value:
- - Service `ceridian_dayforce`: Your Ceridian Dayforce Client Host.
-- `client_id` (String, Sensitive) Field usage depends on `service` value:
- - Service `acumatica`: Your Acumatica client ID.
- - Service `adobe_analytics`: Client ID from the OAuth Server-to-Server or Service Account (JWT) credentials of your Adobe Project.
- - Service `adobe_workfront`: Your Adobe Workfront client ID.
- - Service `adp_workforce_now`: Your ADP Client ID.
- - Service `akamai`: Your Akamai client ID.
- - Service `auth0`: Your Auth0 client ID.
- - Service `billing_platform`: Your BillingPlatform client ID.
- - Service `brightcove`: Your Brightcove client ID.
- - Service `brightpearl`: Your Brightpearl client ID.
- - Service `buildium`: Your Buildium API client ID.
- - Service `canvas_by_instructure`: Your Canvas by Instructure client ID.
- - Service `castor_edc`: Your Castor EDC client ID.
- - Service `commercetools`: Your commercetools client ID.
- - Service `concur`: The SAP Concur Client ID.
- - Service `coupa`: Your Coupa client ID.
- - Service `criteo`: Your Criteo Client ID.
- - Service `criteo_retail_media`: Your Criteo Retail Media client ID.
- - Service `culture_amp`: Your Culture Amp client ID.
- - Service `cvent`: Your Cvent client ID.
- - Service `d2l_brightspace`: Your D2L Brightspace client ID.
- - Service `ebay`: Your eBay app ID.
- - Service `exact_online`: Your Exact Online client ID.
- - Service `flexport`: The Flexport API Key.
- - Service `genesys`: Your Genesys client ID.
- - Service `hana_sap_hva_ecc_netweaver`: Three-digit (000-999) identifier of the SAP client, which is sent to an AS ABAP upon logon.
- - Service `hana_sap_hva_s4_netweaver`: Three-digit (000-999) identifier of the SAP client, which is sent to an AS ABAP upon logon.
- - Service `ilevel`: Your iLevel Client ID. - - Service `instructure`: Your Instructure client ID. - - Service `integral_ad_science`: Your integral_ad_science client id. - - Service `jama_software`: Your Jama Software client ID. - - Service `looker_source`: Your Looker Client ID. - - Service `marketo`: Marketo REST API Client Id. - - Service `medallia`: Medallia Client ID - - Service `microsoft_entra_id`: Your Microsoft Entra ID Client ID. - - Service `microsoft_teams`: Your Microsoft Teams Client ID. - - Service `navan`: Your Navan client ID. - - Service `on24`: Your ON24 client ID. - - Service `oracle_sap_hva_netweaver`: Three-digit (000-999) identifier of the SAP client, which is sent to an AS ABAP upon logon. - - Service `paychex`: Your Paychex client ID. - - Service `paypal`: `Client ID` of your PayPal client application. - - Service `paypal_sandbox`: `Client ID` of your PayPal client application. - - Service `personio`: Your Personio Client ID. - - Service `piwik_pro`: Your Piwik PRO client ID. - - Service `podio`: Your Podio client ID. - - Service `power_reviews_enterprise`: Your PowerReviews Enterprise Client ID. - - Service `prisma_cloud`: Your Prisma Cloud access key ID. - - Service `procore`: Your Procore client ID. - - Service `quora_ads`: Your Quora Ads client ID. - - Service `reltio`: Your Reltio client ID. - - Service `salesforce_commerce_cloud`: The Salesforce Commerce Cloud Client ID. - - Service `salesforce_marketing_cloud`: The Salesforce Marketing Cloud client ID. - - Service `salesloft`: `Client ID` of your Salesloft client application. - - Service `sap_success_factors`: Your SAP SuccessFactors Client ID. - - Service `servicenow`: ServiceNow Client ID. - - Service `servicetitan`: Your ServiceTitan client ID. - - Service `sharetribe`: Your Sharetribe client ID. - - Service `shipnetwork`: Your ShipNetwork client ID. - - Service `sigma_computing_source`: Your Sigma Computing client ID. - - Service `skillstx`: Your SkillsTX client ID. - - Service `smartrecruiters`: Your SmartRecruiters client ID. - - Service `splash`: Your Splash client ID. - - Service `square`: The Application ID of your organization. - - Service `standard_metrics`: Your Standard Metrics Client ID. - - Service `swoogo`: Your Swoogo client Id. - - Service `taboola`: The Taboola client ID. - - Service `talkdesk`: The Client ID of your OAuth Client - - Service `toast`: Your Toast client ID. - - Service `trelica`: Your Trelica client ID. - - Service `tymeshift`: Your Tymeshift email. - - Service `udemy_business`: Your Udemy Business client ID. - - Service `visma`: Your Visma client ID. - - Service `vonage_contact_center`: Your Vonage Contact Center client ID. - - Service `walmart_marketplace`: Your Walmart Marketplace client ID. - - Service `xero`: your clientId - - Service `xray`: Your Xray Client ID. - - Service `yougov_sport`: Your Yougov Sport client ID. - - Service `zendesk_chat`: Your Zendesk client ID. - - Service `zoho_books`: Your Zoho Books Client ID. - - Service `zoho_campaigns`: Your Zoho Campaigns Client ID. - - Service `zoho_desk`: Your Zoho Desk Client Id. - - Service `zoho_inventory`: Your Zoho Inventory client ID. - - Service `zuora`: Zuora Client ID. - - Service `zuora_sandbox`: Zuora Client ID. -- `client_key` (String, Sensitive) Field usage depends on `service` value: - - Service `appfigures`: Your Appfigures Client Key. - - Service `thinkific`: Your Thinkific client key. - - Service `yougov_sport`: Your Yougov Sport client key. 
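Most of the OAuth-style services above pair `client_id` with the `client_secret` field documented further down this list. A hedged sketch of that pairing, with an arbitrary service and placeholder Terraform variables (declarations omitted):

```hcl
# Hypothetical sketch: credentials supplied via variables so the
# sensitive values stay out of the configuration file itself.
resource "fivetran_connector" "quora_ads" {
  group_id = "my_group_id" # placeholder
  service  = "quora_ads"

  destination_schema {
    name = "quora_ads"
  }

  config {
    client_id     = var.quora_ads_client_id
    client_secret = var.quora_ads_client_secret # sensitive; see `client_secret` below
  }
}
```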
-- `client_name` (String, Sensitive) Field usage depends on `service` value: - - Service `destini`: Your Destini Client Name. - - Service `medallia`: Medallia company name. -- `client_namespace` (String) Field usage depends on `service` value: - - Service `ceridian_dayforce`: Your Ceridian Dayforce Client Namespace. -- `client_private_key` (String, Sensitive) Field usage depends on `service` value: - - Service `aurora_postgres`: Client Private Key in .pem format. - - Service `azure_postgres`: Client Private Key in .pem format. - - Service `google_cloud_postgresql`: Client Private Key in .pem format. - - Service `heroku_postgres`: Client Private Key in .pem format. - - Service `postgres`: Client Private Key in .pem format. - - Service `postgres_rds`: Client Private Key in .pem format. -- `client_public_certificate` (String) Field usage depends on `service` value: - - Service `aurora_postgres`: Client Certificate in .pem format. - - Service `azure_postgres`: Client Certificate in .pem format. - - Service `google_cloud_postgresql`: Client Certificate in .pem format. - - Service `heroku_postgres`: Client Certificate in .pem format. - - Service `postgres`: Client Certificate in .pem format. - - Service `postgres_rds`: Client Certificate in .pem format. -- `client_secret` (String, Sensitive) Field usage depends on `service` value: - - Service `acumatica`: Your Acumatica client secret. - - Service `adobe_analytics`: Client Secret from the OAuth Server-to-Server or Service Account (JWT) credentials of your Adobe Project. - - Service `adobe_workfront`: Your Adobe Workfront client secret. - - Service `adp_workforce_now`: Your ADP Client Secret. - - Service `akamai`: Your Akamai client secret. - - Service `auth0`: Your Auth0 client secret. - - Service `billing_platform`: Your BillingPlatform client secret. - - Service `brightcove`: Your Brightcove client secret. - - Service `brightpearl`: Your Brightpearl client secret. - - Service `canvas_by_instructure`: Your Canvas by Instructure client secret. - - Service `castor_edc`: Your Castor EDC Client Secret. - - Service `commercetools`: Your commercetools client secret. - - Service `concur`: The SAP Concur Client secret. - - Service `coupa`: Your Coupa client_secret. - - Service `criteo`: Your Criteo client secret key. - - Service `criteo_retail_media`: Your Criteo Retail Media client secret. - - Service `culture_amp`: Your Culture Amp client secret. - - Service `cvent`: Your Cvent client secret. - - Service `d2l_brightspace`: Your D2L Brightspace client secret. - - Service `ebay`: Your eBay cert ID. - - Service `exact_online`: Your Exact Online client secret. - - Service `flexport`: The Flexport API Secret. - - Service `genesys`: Your Genesys client secret. - - Service `ilevel`: Your iLevel Client Secret. - - Service `instructure`: Your Instructure client secret. - - Service `integral_ad_science`: Your integral_ad_science client secret. - - Service `jama_software`: Your Jama Software client secret. - - Service `looker_source`: Your Looker Client Secret. - - Service `marketo`: Marketo REST API Client Secret. - - Service `medallia`: Medallia Client Secret key. - - Service `microsoft_entra_id`: Your Microsoft Entra ID Client Secret. - - Service `microsoft_teams`: Your Microsoft Teams Client Secret. - - Service `navan`: Your Navan client secret. - - Service `paychex`: Your Paychex client secret. - - Service `personio`: Your Personio secret. - - Service `piwik_pro`: Your Piwik PRO client secret. - - Service `podio`: Your Podio client secret.
- - Service `power_reviews_enterprise`: Your PowerReviews Enterprise Client Secret. - - Service `prisma_cloud`: Your Prisma Cloud secret access Key. - - Service `procore`: Your Procore client secret. - - Service `quora_ads`: Your Quora Ads client secret. - - Service `reltio`: Your Reltio client secret. - - Service `salesforce_commerce_cloud`: The Salesforce Commerce Cloud Client secret. - - Service `salesforce_marketing_cloud`: The Salesforce Marketing Cloud client secret. - - Service `salesloft`: `Client Secret` of your Salesloft client application. - - Service `sap_success_factors`: Your SAP SuccessFactors Client Secret that you generated through SAML Assertion. - - Service `servicenow`: ServiceNow Client Secret. - - Service `servicetitan`: Your ServiceTitan secret key. - - Service `sharetribe`: Your Sharetribe client secret. - - Service `sigma_computing_source`: Your Sigma Computing client secret. - - Service `skillstx`: Your SkillsTX client secret. - - Service `smartrecruiters`: Your SmartRecruiters client secret. - - Service `splash`: Your Splash client secret. - - Service `square`: The Application Secret of your organization. - - Service `standard_metrics`: Your Standard Metrics Client secret. - - Service `swoogo`: Your Swoogo Client Secret. - - Service `taboola`: The Taboola client secret. - - Service `talkdesk`: The Client Secret of your OAuth Client - - Service `thinkific`: Your Thinkific client secret. - - Service `toast`: Your Toast client secret. - - Service `trelica`: Your Trelica client secret. - - Service `tymeshift`: Your Tymeshift password. - - Service `udemy_business`: Your Udemy Business client secret. - - Service `visma`: Your Visma client secret. - - Service `vonage_contact_center`: Your Vonage Contact Center client secret. - - Service `walmart_marketplace`: Your Walmart Marketplace client secret. - - Service `xero`: your clientSecret - - Service `xray`: Your Xray Client Secret. - - Service `zendesk_chat`: Your Zendesk client secret. - - Service `zoho_books`: Your Zoho Books Client Secret. - - Service `zoho_campaigns`: Your Zoho Campaigns Client Secret. - - Service `zoho_desk`: Your Zoho Desk Client secret. - - Service `zoho_inventory`: Your Zoho Inventory client secret. - - Service `zuora`: Zuora Client Secret. - - Service `zuora_sandbox`: Zuora Client Secret. -- `cloud_storage_type` (String) Field usage depends on `service` value: - - Service `braze`: Cloud storage type Braze Current is connected to. -- `collection_address` (String) Field usage depends on `service` value: - - Service `rarible`: Your Rarible Collection Address. -- `columns` (Set of String) Field usage depends on `service` value: - - Service `double_click_publishers`: Columns provide all trafficking statistics and revenue information available for the chosen Dimensions. -- `companies` (Set of String) Field usage depends on `service` value: - - Service `business_central`: List of companies to sync -- `company` (String) Field usage depends on `service` value: - - Service `ordway`: Your Ordway company name. -- `company_id` (String) Field usage depends on `service` value: - - Service `360learning`: Your 360Learning Company ID. - - Service `sage_intacct`: Company ID - - Service `sap_success_factors`: Your SAP SuccessFactors Company ID. -- `company_ids` (String) Field usage depends on `service` value: - - Service `cj_commission_detail`: Your CJ Commission Detail company IDs. -- `company_key` (String, Sensitive) Field usage depends on `service` value: - - Service `khoros_care`: Your Khoros Care companyKey. 
- - Service `upland`: Your Upland Software Company Key. -- `company_request_token` (String, Sensitive) Field usage depends on `service` value: - - Service `concur`: The SAP Concur Company Request Token. -- `company_uuid` (String) Field usage depends on `service` value: - - Service `concur`: The SAP Concur Company UUID. -- `compression` (String) Field usage depends on `service` value: - - Service `aws_cost_report`: If your files are compressed, but do not have extensions indicating the compression method, you can force them to be uncompressed according to the selected compression algorithm. Leave the value as infer if your files are saved with the correct compression extensions. - - Service `azure_blob_storage`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format. - - Service `box`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format. - - Service `dropbox`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format. - - Service `email`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format. - - Service `ftp`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format. - - Service `gcs`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format. - - Service `google_drive`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format. - - Service `kinesis`: If your files are compressed, but do not have extensions indicating the compression method, you can force them to be uncompressed according to the selected compression algorithm. Leave the value as infer if your files are saved with the correct compression extensions. - - Service `s3`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format. - - Service `sftp`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format. - - Service `share_point`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format. - - Service `wasabi_cloud_storage`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format. -- `config_method` (String) Field usage depends on `service` value: - - Service `google_display_and_video_360`: The report configuration method. Specifies whether a new configuration is defined manually or an existing configuration is reused. The default value is `CREATE_NEW`. -- `config_repository_url` (String) Field usage depends on `service` value: - - Service `snowplow`: Public repository URL containing JSON configuration files. -- `config_type` (String) Field usage depends on `service` value: - - Service `facebook`: Option to select Prebuilt Reports or Custom Reports. [Possible config_type values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#configtype). - - Service `google_analytics`: Whether to use the [Prebuilt Reports or Custom Reports](https://fivetran.com/docs/connectors/applications/google-analytics#schemainformation).
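The `compression` setting above (together with the `delimiter`, `empty_header`, `escape_char`, and `file_type` fields documented later in this list) lives in the same `config` block as the bucket fields. A sketch for `s3`; the enum spellings are assumptions, not confirmed by this document:

```hcl
config {
  bucket = "my-data-bucket" # placeholder
  # Decompress extension-less files as gzip; the "GZIP" spelling is assumed.
  compression = "GZIP"
  # Treat files with odd extensions as CSV; the "csv" spelling is assumed.
  file_type = "csv"
  delimiter = ","
}
```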
-- `connecting_user` (String) -- `connecting_user_email` (String) -- `connection_method` (String) Field usage depends on `service` value: - - Service `aws_msk`: How Fivetran connects to your message brokers in the cluster. - - Service `azure_blob_storage`: Connection Method. Possible values: `DIRECT`: Fivetran will connect directly to your storage container, `SSH_TUNNEL`: Fivetran will connect to your storage container using a host machine (commonly used for VPN connections), `PRIVATE_LINK`: Fivetran will connect to your storage container using PrivateLink. - - Service `azure_function`: Connection Method. Possible values: `DIRECT`: Fivetran will connect directly to your function, `PRIVATE_LINK`: Fivetran will connect to your storage container using PrivateLink. - - Service `azure_service_bus`: The connection method. - - Service `sftp`: The connection method used to connect to SFTP Server. -- `connection_name` (String) Field usage depends on `service` value: - - Service `appsflyer`: Your Data Locker Connection Name. Default value: `data-locker-hourly/`. -- `connection_string` (String, Sensitive) Field usage depends on `service` value: - - Service `azure_blob_storage`: The blob storage container connection string. - - Service `azure_event_hub`: Connection string of the Event Hub Namespace you want to sync. - - Service `azure_service_bus`: The connection string used for authentication. Required if the authentication type is `ConnectionString`. - - Service `microsoft_dynamics_365_fno`: The blob storage container's connection string. - - Service `microsoft_dynamics_365_fo`: The blob storage container connection string. -- `connection_type` (String) Field usage depends on `service` value: - - Service `aurora`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `aurora_postgres`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `aws_cost_report`: Connection method. Default value: `Directly`. - - Service `aws_lambda`: Connection method. Default value: `Directly`. - - Service `azure_blob_storage`: Connection method. Default value: `Directly`. - - Service `azure_cosmos_for_mongo`: Possible values:`Directly`, `PrivateLink`. `Directly` is the default value. - - Service `azure_postgres`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `azure_sql_db`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `azure_sql_managed_db`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`.
`SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `bigquery_db`: Direct or PrivateLink connection - - Service `clarity`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `cockroachdb`: Possible values:`Directly`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `db2i_hva`: Possible values:`SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and the following parameter's values are specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. - - Service `db2i_sap_hva`: Possible values:`SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. - - Service `documentdb`: Possible values:`SshTunnel`, `PrivateLink` . `SshTunnel` is used as a value if this parameter is omitted in the request and the following parameter's values are specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. - - Service `dynamics_365_fo`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `ehr`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `elastic_cloud`: Possible values:`Directly`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `email`: Connection method. Default value: `Directly`. - - Service `es_self_hosted`: Possible values:`Directly`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `google_cloud_mysql`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `google_cloud_postgresql`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. 
`SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `google_cloud_sqlserver`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `hana_sap_hva_b1`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `hana_sap_hva_ecc`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `hana_sap_hva_ecc_netweaver`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `hana_sap_hva_s4`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `hana_sap_hva_s4_netweaver`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `heroku_postgres`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `kinesis`: Connection method. Default value: `Directly`. - - Service `magento_mysql`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `magento_mysql_rds`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `maria`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. 
`SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `maria_azure`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `maria_rds`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `mongo`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `mongo_sharded`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `mysql`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `mysql_azure`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `mysql_rds`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `opendistro`: Possible values:`Directly`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `opensearch`: Possible values:`Directly`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `oracle`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. 
- - Service `oracle_ebs`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `oracle_hva`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `oracle_rac`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `oracle_rds`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `oracle_sap_hva`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `oracle_sap_hva_netweaver`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `postgres`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `postgres_rds`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `redshift_db`: Directly or Private Link - - Service `s3`: Connection method. Default value: `Directly`. - - Service `sap_hana`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `sap_s4hana`: Connection Method - - Service `snowflake_db`: Directly or Private Link - - Service `sql_server`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. 
- - Service `sql_server_hva`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `sql_server_rds`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. - - Service `sql_server_sap_ecc_hva`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. -- `console_url` (String) Field usage depends on `service` value: - - Service `prisma_cloud`: Your Prisma Cloud console URL. -- `consumer_group` (String) Field usage depends on `service` value: - - Service `apache_kafka`: Kafka consumer group name. - - Service `aws_msk`: The name of consumer group created for Fivetran. - - Service `azure_event_hub`: Name of consumer group created for Fivetran. - - Service `confluent_cloud`: Confluent Cloud consumer group name. - - Service `heroku_kafka`: Heroku Kafka consumer group name. -- `consumer_key` (String, Sensitive) Field usage depends on `service` value: - - Service `netsuite_suiteanalytics`: Consumer Key - - Service `twitter`: API Key of your app - - Service `twitter_ads`: The Twitter App consumer key. - - Service `woocommerce`: Your WooCommerce Consumer key. -- `consumer_secret` (String, Sensitive) Field usage depends on `service` value: - - Service `netsuite_suiteanalytics`: Consumer Secret - - Service `twitter`: API Secret of your app - - Service `twitter_ads`: The Twitter App consumer secret. - - Service `woocommerce`: Your WooCommerce Consumer secret. -- `container_address` (String) Field usage depends on `service` value: - - Service `azure_blob_storage`: IP address of the Azure Storage Container which is accessible from host machine. -- `container_name` (String) Field usage depends on `service` value: - - Service `azure_blob_storage`: The name of the blob container. - - Service `microsoft_dynamics_365_fno`: The name of the blob container. - - Service `microsoft_dynamics_365_fo`: The name of the blob container. -- `content_owner_id` (String) Field usage depends on `service` value: - - Service `youtube_analytics`: Used only for Content Owner reports. The ID of the content owner for whom the API request is being made. -- `conversation_webhook_url` (String) Field usage depends on `service` value: - - Service `helpscout`: Your conversation webhook URL -- `conversion_dimensions` (Set of String) Field usage depends on `service` value: - - Service `double_click_campaign_manager`: Conversion Dimensions. -- `conversion_report_time` (String) Field usage depends on `service` value: - - Service `pinterest_ads`: The date that the user interacted with the ad OR completed a conversion event. -- `conversion_window_size` (Number) Field usage depends on `service` value: - - Service `google_ads`: A period of time in days during which a conversion is recorded. 
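Every `connection_type` entry above repeats the same inference rule: when the parameter is omitted and any of `tunnel_host`, `tunnel_port`, or `tunnel_user` is set, `SshTunnel` is used; otherwise `Directly` applies. A sketch making the choice explicit for a database connector; the host and tunnel values are placeholders, and the `host`, `port`, and `user` field names are assumptions about the broader config schema:

```hcl
config {
  # Could be omitted: the tunnel_* fields below already imply SshTunnel.
  connection_type = "SshTunnel"

  host = "db.internal.example.com" # placeholder database host
  port = 5432
  user = "fivetran_reader"

  tunnel_host = "bastion.example.com" # placeholder SSH bastion
  tunnel_port = 22
  tunnel_user = "fivetran"
}
```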
-- `convert_dats_type_to_date` (Boolean) -- `csv_definition` (String) Field usage depends on `service` value: - - Service `adjust`: CSV definition for the CSV export (https://help.adjust.com/en/article/csv-uploads#how-do-i-format-my-csv-definition). -- `currency` (String) Field usage depends on `service` value: - - Service `criteo`: Currency -- `custom_event_sync_mode` (String) Field usage depends on `service` value: - - Service `iterable`: Custom Events Sync Mode. -- `custom_events` (Set of String) Field usage depends on `service` value: - - Service `iterable`: List of custom events to sync. Should be specified when `custom_event_sync_mode` is `SelectedEvents` -- `custom_field_ids` (Set of String) Field usage depends on `service` value: - - Service `double_click_publishers`: The list of custom field IDs included in the report. Custom fields can only be selected with their corresponding dimensions. -- `custom_floodlight_variables` (Set of String) Field usage depends on `service` value: - - Service `double_click_campaign_manager`: Custom Floodlight variables enable you to capture information beyond the basics (visits and revenue) that you can collect with standard parameters in your tags. -- `custom_payloads` (Block Set) (see [below for nested schema](#nestedblock--config--custom_payloads)) -- `custom_reports` (Block Set) (see [below for nested schema](#nestedblock--config--custom_reports)) -- `custom_tables` (Block Set) (see [below for nested schema](#nestedblock--config--custom_tables)) -- `custom_url` (String) Field usage depends on `service` value: - - Service `dbt_cloud`: Your dbt Cloud access url. - - Service `jotform`: Your Jotform custom base URL. -- `customer_api_key` (String, Sensitive) Field usage depends on `service` value: - - Service `ukg_pro`: Your UKG Pro Customer API key. -- `customer_id` (String) Field usage depends on `service` value: - - Service `google_ads`: ID of the customer, can be retrieved from your AdWords dashboard. -- `customer_list_id` (String) Field usage depends on `service` value: - - Service `salesforce_commerce_cloud`: The parameter to retrieve customer details. -- `daily_api_call_limit` (Number) -- `data_access_method` (String) Field usage depends on `service` value: - - Service `cosmos`: The source data access method. Supported values:`ACCOUNT_KEY`- Data access method that uses account keys to authenticate to the source database. It comes in both read-write and read-only variants.`RESOURCE_TOKEN`- Fine-grained permission model based on native Azure Cosmos DB users and permissions. Learn more in our [Azure Cosmos DB Data Access Methods documentation](https://fivetran.com/docs/connectors/databases/cosmos#dataaccessmethods). -- `data_center` (String) Field usage depends on `service` value: - - Service `brightpearl`: Your Brightpearl data center. - - Service `qualtrics`: Data center ID of the Qualtrics account. Can be found in the URL before `qualtrics.com`. (For example, if your URL is `youraccount.ca1.qualtrics.com`, then the data center is `ca1`.) - - Service `zoho_crm`: Data Center, depending on the Domain name -- `data_center_id` (String) Field usage depends on `service` value: - - Service `zonka_feedback`: Your Zonka Feedback data center ID. -- `data_set_name` (String) Field usage depends on `service` value: - - Service `bigquery_db`: Data set name -- `database` (String) Field usage depends on `service` value: - - Service `aurora`: The database name. - - Service `aurora_postgres`: The database name. - - Service `azure_postgres`: The database name. 
- - Service `azure_sql_db`: The database name. - - Service `azure_sql_managed_db`: The database name. - - Service `clarity`: The database name. - - Service `cockroachdb`: The database name. - - Service `db2i_hva`: The database name. - - Service `db2i_sap_hva`: The database name. - - Service `dynamics_365_fo`: The database name. - - Service `ehr`: The database name. - - Service `google_cloud_mysql`: The database name. - - Service `google_cloud_postgresql`: The database name. - - Service `google_cloud_sqlserver`: The database name. - - Service `hana_sap_hva_b1`: The Hana database name. - - Service `hana_sap_hva_ecc`: The Hana database name. - - Service `hana_sap_hva_ecc_netweaver`: The Hana database name. - - Service `hana_sap_hva_s4`: The Hana database name. - - Service `hana_sap_hva_s4_netweaver`: The Hana database name. - - Service `heroku_postgres`: The database name. - - Service `magento_mysql`: The database name. - - Service `magento_mysql_rds`: The database name. - - Service `maria`: The database name. - - Service `maria_azure`: The database name. - - Service `maria_rds`: The database name. - - Service `mysql`: The database name. - - Service `mysql_azure`: The database name. - - Service `mysql_rds`: The database name. - - Service `oracle`: The database name. - - Service `oracle_ebs`: The database name. - - Service `oracle_hva`: The database name. - - Service `oracle_rac`: The database name. - - Service `oracle_rds`: The database name. - - Service `oracle_sap_hva`: The database name. - - Service `postgres`: The database name. - - Service `postgres_rds`: The database name. - - Service `redshift_db`: The database name: Redshift - - Service `sap_hana`: The database name. - - Service `sap_s4hana`: The database name. - - Service `snowflake_db`: The database name: Snowflake - - Service `sql_server`: The database name. - - Service `sql_server_hva`: The database name. - - Service `sql_server_rds`: The database name. - - Service `sql_server_sap_ecc_hva`: The database name. -- `database_name` (String) Field usage depends on `service` value: - - Service `firebase`: Database Name. -- `dataset_id` (String) Field usage depends on `service` value: - - Service `google_analytics_360`: The dataset ID. - - Service `google_analytics_4_export`: The Dataset ID. -- `datasource` (String) Field usage depends on `service` value: - - Service `netsuite_suiteanalytics`: The NetSuite data source value: `NetSuite.com`. -- `date_granularity` (String) Field usage depends on `service` value: - - Service `adobe_analytics`: The aggregation duration you want. Default value: `HOUR`. -- `db` (String) Field usage depends on `service` value: - - Service `myosh`: Your Myosh Server variables/db. -- `delimiter` (String) Field usage depends on `service` value: - - Service `aws_cost_report`: Optional. You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. If your files sync with the wrong number of columns, consider setting this value. - - Service `azure_blob_storage`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. - - Service `box`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. - - Service `dropbox`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. - - Service `email`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. - - Service `ftp`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. - - Service `gcs`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. - - Service `google_drive`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. - - Service `kinesis`: Optional. You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. If your files sync with the wrong number of columns, consider setting this value. - - Service `s3`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. - - Service `sftp`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. - - Service `share_point`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. - - Service `wasabi_cloud_storage`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. -- `developer_reference` (String) Field usage depends on `service` value: - - Service `brightpearl`: Your Brightpearl developer reference. -- `dimension_attributes` (Set of String) Field usage depends on `service` value: - - Service `double_click_publishers`: Dimension attributes provide additional fields associated with a Dimension. Dimension attributes can only be selected with their corresponding Dimensions. -- `dimensions` (Set of String) Field usage depends on `service` value: - - Service `adroll`: The dimensions that you want to sync. - - Service `double_click_campaign_manager`: Report dimensions to include into a sync. The `date` dimension is mandatory for all the report types. The `advertiser` dimension is mandatory for `REACH` report type. - - Service `double_click_publishers`: Report dimensions to include in the sync. The `date` dimension is mandatory for all the report types. - - Service `google_analytics`: The report dimensions to include into a sync. The `date` dimension is mandatory for all the report types. - - Service `google_display_and_video_360`: The report dimensions (filters) to include into a sync. The dimension names are provided in the API format. This is a required parameter when `config_method` is set to `CREATE_NEW`. -- `direct_capture_method` (String) Field usage depends on `service` value: - - Service `oracle_hva`: Possible values:`DIRECT`, `BFILE`, `ASM`, `ARCHIVE_ONLY`. - - Service `oracle_sap_hva`: Possible values:`DIRECT`, `BFILE`, `ASM`, `ARCHIVE_ONLY`. -- `distributed_connector_cluster_size` (Number) Field usage depends on `service` value: - - Service `cosmos`: Specifies the total number of connectors in the Distributed Connector Cluster running in parallel. - - Service `dynamodb`: Specifies the total number of connectors in the Distributed Connector Cluster running in parallel. -- `domain` (String) Field usage depends on `service` value: - - Service `auth0`: Your Auth0 domain.
- - Service `bubble`: Your Bubble app name or domain name. - - Service `confluence`: Your Confluence domain. - - Service `kustomer`: Domain is the beginning of your Kustomer URL that comes before .kustomerapp.com, e.g. for yourcompany.kustomerapp.com the domain name is yourcompany. - - Service `okta`: Your Okta domain. - - Service `pipedrive`: Your Pipedrive domain. - - Service `shopware`: Your Shopware domain. - - Service `sistrix`: Your SISTRIX domain. - - Service `solarwinds_service_desk`: Your SolarWinds Service Desk domain. - - Service `uservoice`: Domain of your UserVoice site. If it ends with ".uservoice.com", you can specify just the subdomain ("mydomain.uservoice.com" → "mydomain"). - - Service `zendesk`: Zendesk domain. - - Service `zendesk_sunshine`: Zendesk domain. -- `domain_host_name` (String) Field usage depends on `service` value: - - Service `workday`: Workday host name. - - Service `workday_financial_management`: Workday host name. - - Service `workday_hcm`: Workday host name. -- `domain_name` (String) Field usage depends on `service` value: - - Service `calabrio`: Your Calabrio domain name. - - Service `dynamics_365`: The custom domain name associated with Dynamics 365. - - Service `helpshift`: Your Helpshift domain name. -- `domain_type` (String) Field usage depends on `service` value: - - Service `medallia`: Domain type of your Medallia URL. -- `dsv_service_auth` (String, Sensitive) -- `dsv_subscription_key` (String, Sensitive) -- `ecommerce_stores` (Set of String) Field usage depends on `service` value: - - Service `mailchimp`: List of IDs of the Mailchimp E-Commerce Stores to sync. -- `elements` (Set of String) -- `email` (String) Field usage depends on `service` value: - - Service `appcues`: Your Appcues Email. - - Service `boostr`: Your Boostr email. - - Service `copper`: Your Copper email address. - - Service `email`: Send your emails to this address. - - Service `moloco`: Your Moloco account email. - - Service `netsuite_suiteanalytics`: The NetSuite user's email address. - - Service `pardot`: The email of the Pardot user. - - Service `skuvault`: Your SkuVault email. - - Service `smadex`: Your Smadex account's email ID. - - Service `zendesk`: Zendesk email. - - Service `zendesk_sunshine`: Zendesk email. -- `email_id` (String) Field usage depends on `service` value: - - Service `ordway`: Your Ordway user email ID. - - Service `planful`: Your Planful email ID. -- `empty_header` (Boolean) Field usage depends on `service` value: - - Service `aws_cost_report`: Optional. If your CSV generating software doesn't provide a header line for the documents, Fivetran can generate the generic column names and sync data rows with them. - - Service `azure_blob_storage`: Optional. If your CSV generating software doesn't provide a header line for the documents, Fivetran can generate the generic column names and sync data rows with them. - - Service `box`: Optional. If your CSV generating software doesn't provide a header line for the documents, Fivetran can generate the generic column names and sync data rows with them. - - Service `dropbox`: Optional. If your CSV generating software doesn't provide a header line for the documents, Fivetran can generate the generic column names and sync data rows with them. - - Service `email`: Optional. If your CSV generating software doesn't provide a header line for the documents, Fivetran can generate the generic column names and sync data rows with them. - - Service `ftp`: Optional. If your CSV generating software doesn't provide a header line for the documents, Fivetran can generate the generic column names and sync data rows with them. - - Service `gcs`: Optional. If your CSV generating software doesn't provide a header line for the documents, Fivetran can generate the generic column names and sync data rows with them. - - Service `google_drive`: Optional. If your CSV generating software doesn't provide a header line for the documents, Fivetran can generate the generic column names and sync data rows with them. - - Service `kinesis`: Optional. If your CSV generating software doesn't provide a header line for the documents, Fivetran can generate the generic column names and sync data rows with them. - - Service `s3`: If your CSVs are headerless, set this as `true`. When `true`, we will generate generic column names following the convention of `column_0`, `column_1`, ... `column_n` to map the rows. Default value: `false`. - - Service `sftp`: Optional. If your CSV generating software doesn't provide a header line for the documents, Fivetran can generate the generic column names and sync data rows with them. - - Service `share_point`: Optional. If your CSV generating software doesn't provide a header line for the documents, Fivetran can generate the generic column names and sync data rows with them. - - Service `wasabi_cloud_storage`: If your CSVs are headerless, set this as `true`. When `true`, we will generate generic column names following the convention of `column_0`, `column_1`, ... `column_n` to map the rows. Default value: `false`. -- `enable_all_dimension_combinations` (Boolean) Field usage depends on `service` value: - - Service `double_click_campaign_manager`: Whether to enable all reach dimension combinations in the report. Default value: `false`. -- `enable_archive_log_only` (Boolean) Field usage depends on `service` value: - - Service `sql_server_hva`: Use archive log only mode. - - Service `sql_server_sap_ecc_hva`: Use archive log only mode. -- `enable_data_extensions_syncing` (Boolean) -- `enable_distributed_connector_mode` (Boolean) Field usage depends on `service` value: - - Service `cosmos`: Enable to allow the connector to join a cluster of connectors forming a Distributed Connector Cluster. This cluster allows parallel syncs from the same source to the same destination using multiple connectors. - - Service `dynamodb`: Enable to allow the connector to join a cluster of connectors forming a Distributed Connector Cluster. This cluster allows parallel syncs from the same source to the same destination using multiple connectors. -- `enable_enrichments` (Boolean) Field usage depends on `service` value: - - Service `snowplow`: Enable Enrichments. -- `enable_exports` (Boolean) Field usage depends on `service` value: - - Service `braze`: Enable User Profile Exports. -- `enable_tde` (Boolean) Field usage depends on `service` value: - - Service `sql_server_hva`: Using Transparent Data Encryption (TDE). - - Service `sql_server_sap_ecc_hva`: Using Transparent Data Encryption (TDE). -- `encoded_public_key` (String) Field usage depends on `service` value: - - Service `apple_search_ads`: Use the public key to grant Fivetran access to the Apple Search Ads API. -- `encryption_key` (String, Sensitive) Field usage depends on `service` value: - - Service `marketo`: Marketo SOAP API Encryption Key.
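For the file connectors, `empty_header` controls the generated column names described above. A sketch for `s3`, per its entry; the bucket name is a placeholder:

```hcl
config {
  bucket = "my-headerless-csvs" # placeholder
  # Headerless CSVs: rows are mapped to generated columns
  # column_0, column_1, ... column_n.
  empty_header = true
}
```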
-- `endpoint` (String) Field usage depends on `service` value: - - Service `branch`: Webhook Endpoint - - Service `iterable`: Register the following URL for webhooks on your Iterable dashboard. - - Service `marketo`: Marketo REST API endpoint. - - Service `snowplow`: Connection-specific collector endpoint. The collector endpoint will have the `webhooks.fivetran.com/snowplow/endpoint_ID` format. You will need it to configure Snowplow to connect with Fivetran. - - Service `webhooks`: You can send your events to https://webhooks.fivetran.com/webhooks/{endpoint} -- `engagement_attribution_window` (String) Field usage depends on `service` value: - - Service `pinterest_ads`: The number of days to use as the conversion attribution window for an engagement (i.e. closeup or save) action. -- `enriched_export` (String) Field usage depends on `service` value: - - Service `optimizely`: Enriched Events S3 bucket -- `entity_id` (String) Field usage depends on `service` value: - - Service `checkout`: Your Checkout.com entity ID. - - Service `zuora`: If `is_multi_entity_feature_enabled` is `true`, then it's `EntityId`. - - Service `zuora_sandbox`: If `is_multi_entity_feature_enabled` is `true`, then it's `EntityId`. -- `environment` (String) Field usage depends on `service` value: - - Service `bazaarvoice`: Your Bazaarvoice Environment. - - Service `buildium`: Your Buildium environment. - - Service `checkout`: Your Checkout.com environment. - - Service `concord`: Your Concord environment. - - Service `invoiced`: Your Invoiced environment. - - Service `procore`: Your Procore account environment. - - Service `reltio`: Your Reltio environment. - - Service `servicetitan`: Your ServiceTitan environment. - - Service `smarthr`: Your SmartHR environment. - - Service `trelica`: Your Trelica environment. - - Service `vts`: Your VTS environment. - - Service `younium`: Your Younium API environment. - - Service `zuora`: Zuora Sandbox Environment. This accepts either of the two values Sandbox or Central Sandbox based on your subscription. The default environment is Sandbox. - - Service `zuora_sandbox`: Zuora Sandbox Environment. This accepts either of the two values Sandbox or Central Sandbox based on your subscription. The default environment is Sandbox. -- `environment_name` (String) Field usage depends on `service` value: - - Service `business_central`: Name of the environment -- `escape_char` (String) Field usage depends on `service` value: - - Service `aws_cost_report`: Optional. If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. - - Service `azure_blob_storage`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. - - Service `box`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. - - Service `dropbox`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. - - Service `email`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. - - Service `ftp`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. - - Service `gcs`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. 
- - Service `google_drive`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. - - Service `kinesis`: Optional. If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. - - Service `s3`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. - - Service `sftp`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. - - Service `share_point`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. - - Service `wasabi_cloud_storage`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. -- `escape_char_options` (String) Field usage depends on `service` value: - - Service `gcs`: Approach used by CSV parser. Default value: `CUSTOM_ESCAPE_CHAR`. required for CSV parsing when `non_standard_escape_char` is `true`. - - Service `s3`: Approach used by CSV parser. Default value: `CUSTOM_ESCAPE_CHAR`. required for CSV parsing when `non_standard_escape_char` is `true`. -- `eu_region` (Boolean) Field usage depends on `service` value: - - Service `kustomer`: Turn it on if your app is on EU region - - Service `survey_monkey`: The SurveyMonkey account region. Specify `true`, if your account is hosted in the EU region. Default value is `false`. -- `events` (Set of String) Field usage depends on `service` value: - - Service `iterable`: List of events to sync. Should be specified when `sync_mode` is `SelectedEvents` -- `export_storage_type` (String) Field usage depends on `service` value: - - Service `adjust`: Your cloud storage. - - Service `braze`: Export Storage. Required if `enable_exports` is `true` -- `external_id` (String) Field usage depends on `service` value: - - Service `appsflyer`: The external ID is a string that designates who can assume the role. For more information, click a href="http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html"here/a - - Service `aws_cloudtrail`: This is the same as your `group_id`, used for authentication along with the `role_arn`. - - Service `aws_cost_report`: The external ID is a string that designates who can assume the role. For more information, click a href="http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html"here/a - - Service `aws_lambda`: The external ID is a string that designates who can assume the role. - - Service `aws_msk`: This is the same as your `group_id`. This is required when `sasl_mechanism` is set to `IAM`. - - Service `azure_function`: External ID. - - Service `cloudfront`: This is the same as your `group_id`, used for authentication along with the `role_arn`. - - Service `dynamodb`: This is the same as your `group_id`, used for authentication along with the `role_arn`. - - Service `kinesis`: The external ID is a string that designates who can assume the role. For more information, click a href="http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html"here/a - - Service `s3`: Used for authentication along with the `role_arn`. If not provided, it uses connector's `group_id`. Use the [List All Groups endpoint](https://fivetran.com/docs/rest-api/groups#listallgroups) to find the `group_id`. 
    - Service `segment`: The external ID is a string that designates who can assume the role. For more information, see [Amazon's AWS Identity and Access Management User Guide](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html).
- `facility_codes` (String) Field usage depends on `service` value:
    - Service `unicommerce`: Your uniware facility codes.
- `fields` (Set of String) Field usage depends on `service` value:
    - Service `facebook`: List of fields which the connector will sync. [Possible field values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#fields).
- `file_type` (String) Field usage depends on `service` value:
    - Service `aws_cost_report`: If your files are saved with improper extensions, you can force them to be synced as the selected file type. Leave the value as `infer` if your files have the correct extensions
    - Service `azure_blob_storage`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
    - Service `box`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
    - Service `dropbox`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
    - Service `email`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
    - Service `ftp`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
    - Service `gcs`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
    - Service `google_drive`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
    - Service `kinesis`: If your files are saved with improper extensions, you can force them to be synced as the selected file type. Leave the value as `infer` if your files have the correct extensions
    - Service `s3`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
    - Service `sftp`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
    - Service `share_point`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
    - Service `wasabi_cloud_storage`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
- `filter` (String) Field usage depends on `service` value:
    - Service `google_analytics`: String parameter that restricts the data returned for your report. To use the filter parameter, specify a dimension or metric on which to filter, followed by the filter expression
- `finance_account_sync_mode` (String) Field usage depends on `service` value:
    - Service `itunes_connect`: Whether to sync all finance accounts or specific finance accounts.
- `finance_accounts` (Set of String) Field usage depends on `service` value:
    - Service `itunes_connect`: Specific finance accounts to sync. Must be populated if `finance_account_sync_mode` is set to `SpecificFinanceAccounts`.
- `folder` (String) Field usage depends on `service` value:
    - Service `dropbox`: Your Dropbox Folder URL.
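The `file_type` entries above follow a common pattern for the file-based connectors: parsing options live inside the connector's `config` block. A minimal sketch for an `s3` connector that forces CSV parsing; the group ID and bucket name are placeholders, and other required arguments (such as the destination schema) are omitted:

```hcl
resource "fivetran_connector" "s3_csv" {
  group_id = "my_group_id" # placeholder group ID
  service  = "s3"

  config {
    bucket    = "my-example-bucket" # placeholder bucket name
    file_type = "csv"               # parse mis-named files as CSV instead of leaving "infer"
  }
}
```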
- `folder_id` (String) Field usage depends on `service` value:
    - Service `box`: Folder URL
    - Service `google_drive`: Folder URL
- `folder_path` (String) Field usage depends on `service` value:
    - Service `adobe_analytics_data_feed`: Folder Path
    - Service `one_drive`: Your OneDrive folder URL
    - Service `oracle_business_intelligence_publisher`: The folder path to save data models and reports.
- `forecast_id` (String) Field usage depends on `service` value:
    - Service `clari`: Your Clari Forecast ID.
- `ftp_host` (String) Field usage depends on `service` value:
    - Service `adobe_analytics_data_feed`: FTP host.
- `ftp_password` (String, Sensitive) Field usage depends on `service` value:
    - Service `adobe_analytics_data_feed`: FTP password.
- `ftp_port` (Number) Field usage depends on `service` value:
    - Service `adobe_analytics_data_feed`: FTP port.
- `ftp_user` (String) Field usage depends on `service` value:
    - Service `adobe_analytics_data_feed`: FTP user.
- `function` (String) Field usage depends on `service` value:
    - Service `aws_lambda`: The name of your AWS Lambda Function.
- `function_app` (String) Field usage depends on `service` value:
    - Service `azure_function`: Function app name in Azure portal.
- `function_key` (String, Sensitive) Field usage depends on `service` value:
    - Service `azure_function`: Function key used for authorization.
- `function_name` (String) Field usage depends on `service` value:
    - Service `azure_function`: Name of the function to be triggered.
- `function_trigger` (String, Sensitive) Field usage depends on `service` value:
    - Service `google_cloud_function`: The trigger URL of the cloud function.
- `gcs_bucket` (String) Field usage depends on `service` value:
    - Service `braze`: Your GCS bucket. Required if `GCS` is the `cloud_storage_type`
    - Service `webhooks`: The GCS bucket name. Required if `bucket_service` is set to `GCS`.
- `gcs_export_bucket` (String) Field usage depends on `service` value:
    - Service `braze`: Your GCS bucket. Required if `GCS` is the `export_storage_type`
- `gcs_export_folder` (String) Field usage depends on `service` value:
    - Service `braze`: Your GCS folder name. Required if `GCS` is the `export_storage_type`
- `gcs_folder` (String) Field usage depends on `service` value:
    - Service `braze`: Your GCS folder name. Required if `GCS` is the `cloud_storage_type`
- `generate_fivetran_pk` (Boolean) Field usage depends on `service` value:
    - Service `workday`: Select this option to generate a Primary Key for reports where no single column or combination of columns can be used to form a Primary Key.
- `group_name` (String) Field usage depends on `service` value:
    - Service `fivetran_log`: (Optional) The group name of the `target_group_id`.
- `hana_backup_password` (String, Sensitive)
- `hana_mode` (String) Field usage depends on `service` value:
    - Service `hana_sap_hva_b1`: The mode for connecting to HANA server. Available options: Single container (default), Multiple containers - Tenant database, Multiple containers - System database, Manual port selection - This option is used only if the database port needs to be specified manually.
    - Service `hana_sap_hva_ecc`: The mode for connecting to HANA server. Available options: Single container (default), Multiple containers - Tenant database, Multiple containers - System database, Manual port selection - This option is used only if the database port needs to be specified manually.
    - Service `hana_sap_hva_ecc_netweaver`: The mode for connecting to HANA server. Available options: Single container (default), Multiple containers - Tenant database, Multiple containers - System database, Manual port selection - This option is used only if the database port needs to be specified manually.
    - Service `hana_sap_hva_s4`: The mode for connecting to HANA server. Available options: Single container (default), Multiple containers - Tenant database, Multiple containers - System database, Manual port selection - This option is used only if the database port needs to be specified manually.
    - Service `hana_sap_hva_s4_netweaver`: The mode for connecting to HANA server. Available options: Single container (default), Multiple containers - Tenant database, Multiple containers - System database, Manual port selection - This option is used only if the database port needs to be specified manually.
- `has_manage_permissions` (Boolean) Field usage depends on `service` value:
    - Service `azure_service_bus`: The boolean value specifying whether the connection string has manage permissions
- `historic_sync_time_frame` (String) Field usage depends on `service` value:
    - Service `klaviyo`: Range of data in history you would like to include in the initial sync. Default value: `ALL_TIME`.
    - Service `marketo`: Range of data in history you would like to include in the initial sync. Default value: `ALL_TIME`.
    - Service `salesforce_marketing_cloud`: Range of data in history you would like to include in the initial sync. Default value: `ALL_TIME`.
- `historical_sync_limit` (String) Field usage depends on `service` value:
    - Service `pardot`: The time range for which historical data should be synced. Default value: `All Time`.
- `home_folder` (String) Field usage depends on `service` value:
    - Service `appsflyer`: Your S3 home folder path of the Data Locker.
- `host` (String) Field usage depends on `service` value:
    - Service `aurora`: DB instance host or IP address.
    - Service `aurora_postgres`: DB instance host or IP address.
    - Service `azure_postgres`: DB instance host or IP address.
    - Service `azure_sql_db`: DB instance host or IP address.
    - Service `azure_sql_managed_db`: DB instance host or IP address.
    - Service `clarity`: DB instance host or IP address.
    - Service `cockroachdb`: DB instance host or IP address.
    - Service `commercetools`: Your commercetools host.
    - Service `databricks_db`: The host URL for your Databricks account.
    - Service `db2i_hva`: A host address of the primary node. It should be a DB instance host/IP address with a port number.
    - Service `db2i_sap_hva`: DB instance host or IP address.
    - Service `documentdb`: Host IP address of the primary node. Ignored if `hosts` value is provided.
    - Service `dynamics_365_fo`: DB instance host or IP address.
    - Service `ehr`: DB instance host or IP address.
    - Service `elastic_cloud`: DB instance host or IP address.
    - Service `es_self_hosted`: DB instance host or IP address.
    - Service `ftp`: FTP host address.
    - Service `google_cloud_mysql`: DB instance host or IP address.
    - Service `google_cloud_postgresql`: DB instance host or IP address.
    - Service `google_cloud_sqlserver`: DB instance host or IP address.
    - Service `hana_sap_hva_b1`: DB instance host or IP address.
    - Service `hana_sap_hva_ecc`: DB instance host or IP address.
    - Service `hana_sap_hva_ecc_netweaver`: DB instance host or IP address.
    - Service `hana_sap_hva_s4`: DB instance host or IP address.
    - Service `hana_sap_hva_s4_netweaver`: DB instance host or IP address.
    - Service `heroku_postgres`: DB instance host or IP address.
    - Service `jira`: The Jira service host address.
    - Service `magento_mysql`: DB instance host or IP address.
    - Service `magento_mysql_rds`: DB instance host or IP address.
    - Service `maria`: DB instance host or IP address.
    - Service `maria_azure`: DB instance host or IP address.
    - Service `maria_rds`: DB instance host or IP address.
    - Service `marin`: The Marin host address.
    - Service `mysql`: DB instance host or IP address.
    - Service `mysql_azure`: DB instance host or IP address.
    - Service `mysql_rds`: DB instance host or IP address.
    - Service `netsuite_suiteanalytics`: The NetSuite service host address.
    - Service `opendistro`: DB instance host or IP address.
    - Service `opensearch`: DB instance host or IP address.
    - Service `oracle`: DB instance host or IP address.
    - Service `oracle_ebs`: DB instance host or IP address.
    - Service `oracle_hva`: DB instance host or IP address.
    - Service `oracle_rac`: DB instance host or IP address.
    - Service `oracle_rds`: DB instance host or IP address.
    - Service `oracle_sap_hva`: DB instance host or IP address.
    - Service `oracle_sap_hva_netweaver`: DB instance host or IP address.
    - Service `postgres`: DB instance host or IP address.
    - Service `postgres_rds`: DB instance host or IP address.
    - Service `redshift_db`: Host name
    - Service `sap_hana`: The SAP HANA host or IP address.
    - Service `sap_s4hana`: The SAP S/4 host or IP address.
    - Service `sftp`: SFTP host address.
    - Service `snowflake_db`: Host name
    - Service `splunk`: The Splunk service host address.
    - Service `sql_server`: DB instance host or IP address.
    - Service `sql_server_hva`: DB instance host or IP address.
    - Service `sql_server_rds`: DB instance host or IP address.
    - Service `sql_server_sap_ecc_hva`: DB instance host or IP address.
- `host_ip` (String) Field usage depends on `service` value:
    - Service `azure_blob_storage`: IP address of host tunnel machine which is used to connect to the Storage container.
    - Service `azure_service_bus`: The IP address of the host machine which we use to connect to ASB via ssh
- `host_name` (String) Field usage depends on `service` value:
    - Service `coassemble`: Your Coassemble Hostname.
    - Service `datadog`: Your Datadog host name.
- `host_url` (String) Field usage depends on `service` value:
    - Service `adobe_commerce`: Your Adobe Commerce host URL.
    - Service `sparkpost`: Your SparkPost host URL.
- `host_user` (String) Field usage depends on `service` value:
    - Service `azure_blob_storage`: Username in the host machine.
    - Service `azure_service_bus`: The username on the host machine which we use to connect to ASB via ssh
- `hostname` (String) Field usage depends on `service` value:
    - Service `akamai`: Your Akamai hostname.
    - Service `ukg_pro`: Your UKG Pro hostname.
- `hosts` (Set of String) Field usage depends on `service` value:
    - Service `azure_cosmos_for_mongo`: A list of host addresses for Azure Cosmos DB for Mongo DB.
    - Service `mongo`: A list of host addresses of the primary node and all replicas. Each list item is either: a DB instance host/IP address with a port number, or SRV host record.
    - Service `mongo_sharded`: A list of host addresses of the primary node and all replicas. Each list item is either: a DB instance host/IP address with a port number, or SRV host record.
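To illustrate the `hosts` set for replica-set sources such as `mongo`, each element is a host/IP address with a port number or an SRV host record. A sketch with placeholder addresses, shown as a `config` fragment of a `fivetran_connector` resource:

```hcl
config {
  hosts = [
    "mongo-primary.example.com:27017",  # primary node (placeholder address)
    "mongo-replica-1.example.com:27017" # replica (placeholder address)
  ]
}
```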
- `http_path` (String) Field usage depends on `service` value:
    - Service `databricks_db`: HTTP path
- `identifier` (String) Field usage depends on `service` value:
    - Service `playvox_workforce_management`: Your Playvox Workforce Management Identifier.
    - Service `statistics_netherlands_cbs`: Your Statistics Netherlands CBS catalog identifier.
- `identity` (String) Field usage depends on `service` value:
    - Service `marketo`: Marketo REST API identity URL.
- `include_ocapi_endpoints` (Boolean) Field usage depends on `service` value:
    - Service `salesforce_commerce_cloud`: Whether to sync data through OCAPI endpoints.
- `instance` (String) Field usage depends on `service` value:
    - Service `acumatica`: Your Acumatica instance name.
    - Service `coupa`: The instance name of your Coupa account in the URL.
    - Service `salesforce_marketing_cloud`: The Salesforce Marketing Cloud instance ID
    - Service `servicenow`: ServiceNow Instance ID.
- `instance_number` (String) Field usage depends on `service` value:
    - Service `hana_sap_hva_b1`: Two-digit number (00-97) of the SAP instance within its host.
    - Service `hana_sap_hva_ecc`: Two-digit number (00-97) of the SAP instance within its host.
    - Service `hana_sap_hva_ecc_netweaver`: Two-digit number (00-97) of the SAP instance within its host.
    - Service `hana_sap_hva_s4`: Two-digit number (00-97) of the SAP instance within its host.
    - Service `hana_sap_hva_s4_netweaver`: Two-digit number (00-97) of the SAP instance within its host.
    - Service `oracle_sap_hva_netweaver`: Two-digit number (00-97) of the SAP instance within its host.
- `instance_url` (String) Field usage depends on `service` value:
    - Service `sap_business_by_design`: The SAP Business ByDesign instance URL.
- `integration_key` (String, Sensitive) Field usage depends on `service` value:
    - Service `pendo`: The integration key of the Pendo account.
- `is_account_level_connector` (Boolean) Field usage depends on `service` value:
    - Service `fivetran_log`: (Optional) Retrieve account-level logs.
- `is_auth2_enabled` (Boolean) Field usage depends on `service` value:
    - Service `apple_search_ads`: Specifies whether OAuth 2.0 authentication is enabled. Default value: `false`
- `is_custom_api_credentials` (Boolean) Field usage depends on `service` value:
    - Service `twitter_ads`: Custom API credentials
- `is_external_activities_endpoint_selected` (Boolean) Field usage depends on `service` value:
    - Service `pardot`: Whether the `EXTERNAL_ACTIVITY` table must be synced or not.
- `is_ftps` (Boolean) Field usage depends on `service` value:
    - Service `adobe_analytics_data_feed`: Use Secure FTP (FTPS).
- `is_keypair` (Boolean) Field usage depends on `service` value:
    - Service `sftp`: Whether to use a key pair for authentication. When `true`, do not use `password`.
- `is_multi_entity_feature_enabled` (Boolean) Field usage depends on `service` value:
    - Service `zuora`: Set to `true` if there are multiple entities in your Zuora account and you only want to use one entity. Otherwise, set to `false`.
    - Service `zuora_sandbox`: Set to `true` if there are multiple entities in your Zuora account and you only want to use one entity. Otherwise, set to `false`.
- `is_new_package` (Boolean) Field usage depends on `service` value:
    - Service `salesforce_marketing_cloud`: Indicates that your installed package uses OAuth 2.0. Default value: `false`
- `is_private_key_encrypted` (Boolean) Field usage depends on `service` value:
    - Service `snowflake_db`: Indicates that a private key is encrypted. The default value: `false`. The field can be specified if authentication type is `KEY_PAIR`.
- `is_private_link_required` (Boolean) Field usage depends on `service` value:
    - Service `aws_lambda`: We use PrivateLink by default if your AWS Lambda is in the same region as Fivetran. Turning on this toggle ensures that Fivetran always connects to AWS lambda over PrivateLink. Learn more in our [PrivateLink documentation](https://fivetran.com/docs/connectors/databases/connection-options#awsprivatelink).
- `is_public` (Boolean) Field usage depends on `service` value:
    - Service `aws_cost_report`: Whether you are syncing from a public bucket. Default value: `false`.
    - Service `kinesis`: Is the bucket public? (you don't need an AWS account for syncing public buckets!)
    - Service `s3`: Whether you are syncing from a public bucket. Default value: `false`.
- `is_sailthru_connect_enabled` (Boolean) Field usage depends on `service` value:
    - Service `sailthru`: Enable this if you want to sync Sailthru Connect
- `is_secure` (Boolean) Field usage depends on `service` value:
    - Service `ftp`: Whether the server supports FTPS.
- `is_sftp_creds_available` (Boolean) Field usage depends on `service` value:
    - Service `salesforce_marketing_cloud`: Provide SFTP credentials
- `is_single_table_mode` (Boolean) Field usage depends on `service` value:
    - Service `box`: Allows the creation of a connector using the Merge Mode strategy.
    - Service `dropbox`: Allows the creation of a connector using the Merge Mode strategy.
    - Service `google_drive`: Allows the creation of a connector using the Merge Mode strategy.
    - Service `sftp`: Allows the creation of a connector using the specified Sync strategy.
    - Service `share_point`: Allows the creation of a connector using the Merge Mode strategy.
- `is_vendor` (Boolean) Field usage depends on `service` value:
    - Service `amazon_selling_partner`: Whether or not you have a Vendor Account. Default value: `false`.
- `json_delivery_mode` (String) Field usage depends on `service` value:
    - Service `aws_cost_report`: Control how your JSON data is delivered into your destination
    - Service `azure_blob_storage`: Control how your JSON data is delivered into your destination
    - Service `box`: Control how your JSON data is delivered into your destination
    - Service `dropbox`: Control how your JSON data is delivered into your destination
    - Service `email`: Control how your JSON data is delivered into your destination
    - Service `ftp`: Control how your JSON data is delivered into your destination
    - Service `gcs`: Control how your JSON data is delivered into your destination
    - Service `google_drive`: Control how your JSON data is delivered into your destination
    - Service `kinesis`: Control how your JSON data is delivered into your destination
    - Service `s3`: Control how your JSON data is delivered into your destination
    - Service `sftp`: Control how your JSON data is delivered into your destination
    - Service `share_point`: Control how your JSON data is delivered into your destination
    - Service `wasabi_cloud_storage`: Specifies how Fivetran should handle your JSON data. Default value: `Packed`.
- `key` (String, Sensitive) Field usage depends on `service` value:
    - Service `uservoice`: The UserVoice API key.
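As an example of the boolean toggles above, switching an `sftp` connector to key-pair authentication via `is_keypair` means omitting `password` entirely; Fivetran then authenticates with the generated public key. A hypothetical `config` fragment:

```hcl
config {
  host       = "sftp.example.com" # placeholder SFTP host
  is_keypair = true               # authenticate with a key pair; do not set `password`
}
```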
- `key_password` (String, Sensitive) Field usage depends on `service` value:
    - Service `aws_msk`: If `security_protocol` is set to `TLS`, enter your `Key Password`.
- `key_store_type` (String) Field usage depends on `service` value:
    - Service `heroku_kafka`: Key Store Type
- `keystore` (String, Sensitive) Field usage depends on `service` value:
    - Service `aws_msk`: If `security_protocol` is set to `TLS`, add the `Keystore File` as Base64 encoded string.
- `keystore_password` (String, Sensitive) Field usage depends on `service` value:
    - Service `aws_msk`: If `security_protocol` is set to `TLS`, enter your `Keystore Password`.
- `legal_entity_id` (String) Field usage depends on `service` value:
    - Service `younium`: Your Younium legal entity ID.
- `limit_for_api_calls_to_external_activities_endpoint` (Number) Field usage depends on `service` value:
    - Service `pardot`: API limit for the external activities endpoint.
- `line_separator` (String) Field usage depends on `service` value:
    - Service `aws_cost_report`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
    - Service `azure_blob_storage`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
    - Service `box`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
    - Service `dropbox`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
    - Service `email`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
    - Service `ftp`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
    - Service `gcs`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
    - Service `google_drive`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
    - Service `kinesis`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
    - Service `s3`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
    - Service `sftp`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
    - Service `share_point`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
    - Service `wasabi_cloud_storage`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
- `list_of_company_ids` (String) Field usage depends on `service` value:
    - Service `cj_commission_detail`: Your CJ Commission Detail list of company IDs.
- `list_strategy` (String) Field usage depends on `service` value:
    - Service `aws_cost_report`: Optional. If you have a file structure where new files are always named in lexicographically increasing order such as files being named in increasing order of time, you can select `time_based_pattern_listing`.
    - Service `kinesis`: Optional. If you have a file structure where new files are always named in lexicographically increasing order such as files being named in increasing order of time, you can select `time_based_pattern_listing`.
    - Service `s3`: The listing strategy you want to use. Default value: `complete_listing`.
- `list_sync_mode` (String) Field usage depends on `service` value:
    - Service `google_analytics_4_export`: The Sync Mode
- `log_journal` (String) Field usage depends on `service` value:
    - Service `db2i_hva`: The log journal name.
    - Service `db2i_sap_hva`: The log journal name.
- `log_journal_schema` (String) Field usage depends on `service` value:
    - Service `db2i_hva`: The log journal schema.
    - Service `db2i_sap_hva`: The log journal schema.
- `log_on_group` (String) Field usage depends on `service` value:
    - Service `hana_sap_hva_ecc_netweaver`: Name of the SAP logon group. The default value is PUBLIC. This field is optional.
    - Service `hana_sap_hva_s4_netweaver`: Name of the SAP logon group. The default value is PUBLIC. This field is optional.
- `login` (String) Field usage depends on `service` value:
    - Service `rebound_returns`: Your ReBound Returns login.
    - Service `the_trade_desk`: The Trade Desk email. It is a part of the login credentials.
    - Service `walmart_dsp`: Walmart DSP email. It is a part of the login credentials.
- `login_password` (String, Sensitive) Field usage depends on `service` value:
    - Service `concur`: The SAP Concur password.
    - Service `sage_intacct`: The login password. It is a part of the login credentials.
- `manager_accounts` (Set of String) Field usage depends on `service` value:
    - Service `google_ads`: The list of the Manager Account IDs whose clients will be synced. Must be populated if `sync_mode` is set to `ManagerAccounts`.
- `max_api_requests_per_day` (Number) Field usage depends on `service` value:
    - Service `reltio`: Maximum API requests per day
- `merchant_id` (String) Field usage depends on `service` value:
    - Service `afterpay`: Your Afterpay Merchant ID.
    - Service `amazon_selling_partner`: The Merchant ID or Vendor Code.
    - Service `avantlink`: Your AvantLink Merchant ID.
    - Service `braintree`: Your Braintree merchant ID.
    - Service `braintree_sandbox`: Your Braintree merchant ID.
    - Service `xsolla`: Your Xsolla Merchant ID.
- `message_type` (String) Field usage depends on `service` value:
    - Service `apache_kafka`: Kafka message type.
    - Service `aws_msk`: The Message type.
    - Service `azure_event_hub`: Message type.
    - Service `azure_service_bus`: The format of messages in the topic
    - Service `confluent_cloud`: Confluent Cloud message type.
    - Service `heroku_kafka`: Heroku Kafka message type.
- `metrics` (Set of String) Field usage depends on `service` value:
    - Service `adroll`: The metrics that you want to sync.
    - Service `criteo`: Metrics
    - Service `double_click_campaign_manager`: Report metrics to include into a sync.
    - Service `google_analytics`: The report metrics to include into a sync.
    - Service `google_display_and_video_360`: The report metrics to include into a sync. The metric names are provided in the API format. This is a required parameter when `config_method` is set to `CREATE_NEW`.
- `named_range` (String) Field usage depends on `service` value:
    - Service `google_sheets`: The name of the named data range on the sheet that contains the data to be synced.
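The `manager_accounts` entry shows the dependent-field pattern used throughout this reference: the set only takes effect under a specific `sync_mode`. A sketch with a placeholder Manager Account ID:

```hcl
config {
  sync_mode        = "ManagerAccounts" # sync the clients of specific manager accounts
  manager_accounts = ["123-456-7890"]  # placeholder Google Ads Manager Account ID
}
```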
- `namespace` (String) Field usage depends on `service` value:
    - Service `azure_service_bus`: The ASB namespace which we have to sync. Required for `AzureActiveDirectory` authentication.
- `network_code` (Number) Field usage depends on `service` value:
    - Service `double_click_publishers`: Network code is a unique, numeric identifier for your Ad Manager network.
- `non_standard_escape_char` (Boolean) Field usage depends on `service` value:
    - Service `gcs`: Use this if your CSV generator uses non-standard ways of escaping characters. Default value: `false`.
    - Service `s3`: Use this if your CSV generator uses non-standard ways of escaping characters. Default value: `false`.
- `null_sequence` (String) Field usage depends on `service` value:
    - Service `aws_cost_report`: Optional. If your CSVs use a special value indicating null, you can specify it here.
    - Service `azure_blob_storage`: If your CSVs use a special value indicating null, you can specify it here.
    - Service `box`: If your CSVs use a special value indicating null, you can specify it here.
    - Service `dropbox`: If your CSVs use a special value indicating null, you can specify it here.
    - Service `email`: If your CSVs use a special value indicating null, you can specify it here.
    - Service `ftp`: If your CSVs use a special value indicating null, you can specify it here.
    - Service `gcs`: If your CSVs use a special value indicating null, you can specify it here.
    - Service `google_drive`: If your CSVs use a special value indicating null, you can specify it here.
    - Service `kinesis`: Optional. If your CSVs use a special value indicating null, you can specify it here.
    - Service `s3`: If your CSVs use a special value indicating null, you can specify it here.
    - Service `sftp`: If your CSVs use a special value indicating null, you can specify it here.
    - Service `share_point`: If your CSVs use a special value indicating null, you can specify it here.
    - Service `wasabi_cloud_storage`: If your CSVs use a special value indicating null, you can specify it here.
- `oauth_token` (String, Sensitive)
- `oauth_token_secret` (String, Sensitive)
- `ocapi_client_id` (String) Field usage depends on `service` value:
    - Service `salesforce_commerce_cloud`: The Salesforce Commerce Cloud OCAPI Client ID.
- `ocapi_client_secret` (String, Sensitive) Field usage depends on `service` value:
    - Service `salesforce_commerce_cloud`: The Salesforce Commerce Cloud OCAPI Client secret.
- `ocapi_custom_object_types` (String) Field usage depends on `service` value:
    - Service `salesforce_commerce_cloud`: The Salesforce Commerce Cloud OCAPI custom object types.
- `ocapi_hostname` (String) Field usage depends on `service` value:
    - Service `salesforce_commerce_cloud`: The Salesforce Commerce Cloud OCAPI hostname.
- `odbc_driver_manager_library_path` (String) Field usage depends on `service` value:
    - Service `hana_sap_hva_b1`: ODBC manager library path
    - Service `hana_sap_hva_ecc`: ODBC manager library path
    - Service `hana_sap_hva_s4`: ODBC manager library path
- `odbc_sys_ini_path` (String) Field usage depends on `service` value:
    - Service `hana_sap_hva_b1`: odbc.ini and odbcinst.ini location
    - Service `hana_sap_hva_ecc`: odbc.ini and odbcinst.ini location
    - Service `hana_sap_hva_s4`: odbc.ini and odbcinst.ini location
- `on_error` (String) Field usage depends on `service` value:
    - Service `aws_cost_report`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data.
    - Service `azure_blob_storage`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data.
    - Service `box`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data.
    - Service `dropbox`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data.
    - Service `ftp`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data.
    - Service `gcs`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data.
    - Service `google_drive`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data.
    - Service `kinesis`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data.
    - Service `s3`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data.
    - Service `sftp`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data.
    - Service `share_point`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data.
    - Service `wasabi_cloud_storage`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data.
- `on_premise` (Boolean) Field usage depends on `service` value:
    - Service `jira`: Whether the Jira instance is local or in the cloud.
- `organization` (String) Field usage depends on `service` value:
    - Service `gladly`: Your Gladly Organization Name.
    - Service `statuspage`: Your Statuspage Organization ID.
- `organization_domain` (String) Field usage depends on `service` value:
    - Service `adobe_workfront`: Your Adobe Workfront organization domain.
- `organization_id` (String) Field usage depends on `service` value:
    - Service `adobe_analytics`: Organization ID from the Service Account (JWT) credentials of your Adobe Project.
    - Service `integrate`: Your Integrate organization ID.
    - Service `megaphone`: Your Megaphone organization ID.
    - Service `productive`: Your Productive Organization ID.
    - Service `salesforce_commerce_cloud`: The organization ID from Salesforce Commerce Cloud account.
    - Service `zoho_books`: Your Zoho Books Organization ID.
    - Service `zoho_inventory`: Your Zoho Inventory organization ID.
- `organization_name` (String) Field usage depends on `service` value:
    - Service `brightpearl`: Your Brightpearl organization name.
    - Service `confluence`: Your Confluence organization name.
- `organizations` (Set of String) Field usage depends on `service` value:
    - Service `apple_search_ads`: Organizations
    - Service `snapchat_ads`: Specific organization IDs to sync. Must be populated if `syncMode` is set to `SpecificOrganizations`.
- `packed_mode_tables` (Set of String) Field usage depends on `service` value:
    - Service `azure_cosmos_for_mongo`: List of tables to be synced in packed mode; format: `db.table` (case-sensitive).
    - Service `dynamodb`: List of tables to be synced in packed mode.
    - Service `firebase`: Specific tables to sync. Must be populated if `packing_mode` is set to `SelectTablesForPackedMode`.
    - Service `mongo`: List of tables to be synced in packed mode; format: `db.table` (case-sensitive).
    - Service `mongo_sharded`: List of tables to be synced in packed mode; format: `db.table` (case-sensitive).
- `packing_mode` (String) Field usage depends on `service` value:
    - Service `azure_cosmos_for_mongo`: Indicates the desired sync pack mode. Accepted values are `UsePackedModeOnly` and `UseUnpackedModeOnly`. `SelectTablesForPackedMode` is deprecated.
    - Service `firebase`: Whether to sync all tables in unpacked mode or specific tables in packed mode. Default value: `UseUnpackedModeOnly`.
    - Service `klaviyo`: Packing mode for EVENT and PERSON tables.
    - Service `mongo`: Whether to sync all tables in unpacked mode only, all tables in packed mode only, or specific tables in packed mode. Default value: `UseUnpackedModeOnly`.
    - Service `mongo_sharded`: Whether to sync all tables in unpacked mode only, all tables in packed mode only, or specific tables in packed mode. Default value: `UseUnpackedModeOnly`.
    - Service `optimizely`: Packing mode for conversion and decision tables.
    - Service `sailthru`: Packing mode for LIST_STATE and USER tables.
- `pages` (Set of String) Field usage depends on `service` value:
    - Service `facebook_pages`: Specific pages to sync. Must be populated if `sync_mode` is set to `SpecificPages`.
- `partner_code` (String) Field usage depends on `service` value:
    - Service `care_quality_commission`: Your Care Quality Commission partner code.
- `partner_user_id` (String) Field usage depends on `service` value:
    - Service `expensify`: Your Expensify partnerUserID.
- `partner_user_secret` (String, Sensitive) Field usage depends on `service` value:
    - Service `expensify`: Your Expensify partnerUserSecret.
- `partners` (Set of String) Field usage depends on `service` value:
    - Service `google_display_and_video_360`: The list of partners to include into a sync. This parameter only takes effect when `config_method` is set to `CREATE_NEW`.
    - Service `the_trade_desk`: Specific Partner IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`.
    - Service `walmart_dsp`: Specific Partner IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`.
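`packing_mode` and `packed_mode_tables` above work the same way: per the descriptions, the table list only applies when specific tables are synced in packed mode. A sketch for a `mongo` connector (the table name is a placeholder):

```hcl
config {
  packing_mode       = "SelectTablesForPackedMode" # sync only the listed tables in packed mode
  packed_mode_tables = ["analytics.events"]        # db.table format, case-sensitive
}
```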
- `pass_phrase` (String, Sensitive) Field usage depends on `service` value:
    - Service `qualtrics`: Pass Phrase
- `passphrase` (String, Sensitive) Field usage depends on `service` value:
    - Service `snowflake_db`: If the private key is encrypted, you are required to enter the passphrase that was used to encrypt it. The field can be specified if authentication type is `KEY_PAIR`.
- `password` (String, Sensitive) Field usage depends on `service` value:
    - Service `absorb_lms`: Your Absorb LMS password.
    - Service `adobe_commerce`: Your Adobe Commerce password.
    - Service `anaplan`: Your Anaplan password. Must be populated if `auth_mode` is set to `Basic`.
    - Service `appfigures`: Your Appfigures Password.
    - Service `aurora`: The user's password.
    - Service `aurora_postgres`: The user's password.
    - Service `azure_cosmos_for_mongo`: Password used for source database authentication.
    - Service `azure_postgres`: The user's password.
    - Service `azure_sql_db`: The user's password.
    - Service `azure_sql_managed_db`: The user's password.
    - Service `boostr`: Your Boostr password.
    - Service `ceridian_dayforce`: Your Ceridian Dayforce Password.
    - Service `cin7`: Your Cin7 API Key.
    - Service `clarity`: The user's password.
    - Service `cockroachdb`: The user's password.
    - Service `collibra`: Your collibra password.
    - Service `contrast_security`: Your Contrast Security API Password.
    - Service `db2i_hva`: The user's password.
    - Service `db2i_sap_hva`: The user's password.
    - Service `documentdb`: The user's password.
    - Service `dynamics_365_fo`: The user's password.
    - Service `ehr`: The user's password.
    - Service `elastic_cloud`: The user's password.
    - Service `es_self_hosted`: The user's password.
    - Service `ftp`: FTP password.
    - Service `globalmeet`: Your GlobalMeet Password.
    - Service `google_cloud_mysql`: The user's password.
    - Service `google_cloud_postgresql`: The user's password.
    - Service `google_cloud_sqlserver`: The user's password.
    - Service `green_power_monitor`: Your GreenPowerMonitor password.
    - Service `guru`: Your Guru password.
    - Service `hana_sap_hva_b1`: The user's password.
    - Service `hana_sap_hva_ecc`: The user's password.
    - Service `hana_sap_hva_ecc_netweaver`: The user's password.
    - Service `hana_sap_hva_s4`: The user's password.
    - Service `hana_sap_hva_s4_netweaver`: The user's password.
    - Service `heroku_postgres`: The user's password.
    - Service `impact`: Your Impact Account Token
    - Service `integral_ad_science`: Your integral_ad_science password.
    - Service `itunes_connect`: Your password
    - Service `jamf`: Your Jamf password.
    - Service `jira`: The Jira user's password.
    - Service `khoros_care`: Your Khoros Care password.
    - Service `kissmetrics`: Your Kissmetrics API Password.
    - Service `klarna`: Your Klarna Password.
    - Service `lessonly`: Your Lessonly password.
    - Service `magento_mysql`: The user's password.
    - Service `magento_mysql_rds`: The user's password.
    - Service `maria`: The user's password.
    - Service `maria_azure`: The user's password.
    - Service `maria_rds`: The user's password.
    - Service `marin`: The Marin user's password.
    - Service `moloco`: Your Moloco account password.
    - Service `mongo`: The user's password.
    - Service `mongo_sharded`: The user's password.
    - Service `myosh`: Your myosh password.
    - Service `mysql`: The user's password.
    - Service `mysql_azure`: The user's password.
    - Service `mysql_rds`: The user's password.
    - Service `netsuite_suiteanalytics`: The NetSuite user's password.
    - Service `opendistro`: The user's password.
    - Service `opensearch`: The user's password.
    - Service `oracle`: The user's password.
    - Service `oracle_business_intelligence_publisher`: The Oracle Business Intelligence user password.
    - Service `oracle_ebs`: The user's password.
    - Service `oracle_fusion_cloud_apps_crm`: The Oracle Fusion Cloud user password.
    - Service `oracle_fusion_cloud_apps_fscm`: The Oracle Fusion Cloud user password.
    - Service `oracle_fusion_cloud_apps_hcm`: The Oracle Fusion Cloud user password.
    - Service `oracle_hva`: The user's password.
    - Service `oracle_rac`: The user's password.
    - Service `oracle_rds`: The user's password.
    - Service `oracle_sap_hva`: The user's password.
    - Service `oracle_sap_hva_netweaver`: The user's password.
    - Service `outbrain`: The Outbrain user's password.
    - Service `pardot`: The Pardot user's password.
    - Service `partnerize`: Your Partnerize account's password.
    - Service `podio`: Your Podio password.
    - Service `postgres`: The user's password.
    - Service `postgres_rds`: The user's password.
    - Service `redshift_db`: The Redshift user's password.
    - Service `revx`: Your RevX Password.
    - Service `rtb_house`: Your RTB House password.
    - Service `sap_business_by_design`: The SAP Business ByDesign password.
    - Service `sap_hana`: Your SAP HANA password.
    - Service `sap_s4hana`: Your SAP S/4 password.
    - Service `scorm`: Your Scorm Secret Key.
    - Service `servicenow`: Your account password.
    - Service `sftp`: SFTP password.
    - Service `shiphero`: Your ShipHero password.
    - Service `shipstation`: Your ShipStation password.
    - Service `shopware`: Your Shopware password.
    - Service `skuvault`: Your SkuVault password.
    - Service `smadex`: Your Smadex Password.
    - Service `snowflake_db`: The Snowflake user's password.
    - Service `splash`: Your Splash password.
    - Service `splunk`: The Splunk user's password.
    - Service `sql_server`: The user's password.
    - Service `sql_server_hva`: The user's password.
    - Service `sql_server_rds`: The user's password.
    - Service `sql_server_sap_ecc_hva`: The user's password.
    - Service `starrez`: Your StarRez API password
    - Service `stylight`: Your Stylight Password.
    - Service `teamwork`: Your Teamwork password.
    - Service `the_trade_desk`: The Trade Desk password. It is a part of the login credentials.
    - Service `toggl_track`: Your Toggl Track Password
    - Service `ukg_pro`: Your UKG Pro password.
    - Service `unicommerce`: Your uniware login password.
    - Service `upland`: Your Upland Software Password.
    - Service `veevavault`: Your Veeva Vault password.
    - Service `walmart_dsp`: Walmart DSP password. It is a part of the login credentials.
    - Service `when_i_work`: Your When I Work password.
    - Service `wherefour`: Your Wherefour password.
    - Service `workday`: Workday password.
    - Service `workday_financial_management`: Workday password.
    - Service `workday_hcm`: Workday password.
    - Service `xandr`: Your Xandr password.
    - Service `younium`: Your Younium password.
- `pat` (String, Sensitive) Field usage depends on `service` value:
    - Service `github`: The `Personal Access Token` generated in GitHub.
- `pat_name` (String) Field usage depends on `service` value:
    - Service `tableau_source`: Your Tableau Source PAT Name.
- `pat_secret` (String, Sensitive) Field usage depends on `service` value:
    - Service `tableau_source`: Your Tableau Source PAT Secret.
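Because `password`, `pat`, and the other credentials above are sensitive, one reasonable pattern (a sketch, not the only option) is to feed them in through a sensitive Terraform variable rather than hard-coding them:

```hcl
variable "connector_password" {
  type      = string
  sensitive = true # keeps the value out of CLI output
}

# Fragment of a connector's config block; the remaining
# service-specific fields are omitted.
config {
  password = var.connector_password
}
```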
- `path` (String) Field usage depends on `service` value:
    - Service `jira`: A URL subdirectory where the Jira instance is working.
- `pats` (Set of String, Sensitive) Field usage depends on `service` value:
    - Service `github`: The `Personal Access Tokens`.
- `pattern` (String) Field usage depends on `service` value:
    - Service `aws_cost_report`: Optional. All files in your search path matching this regular expression will be synced.
    - Service `azure_blob_storage`: All files in your search path matching this regular expression will be synced.
    - Service `box`: All files in your search path matching this regular expression will be synced.
    - Service `dropbox`: All files in your search path matching this regular expression will be synced.
    - Service `email`: All files in your search path matching this regular expression will be synced.
    - Service `ftp`: All files in your search path matching this regular expression will be synced.
    - Service `gcs`: All files in your search path matching this regular expression will be synced.
    - Service `google_drive`: All files in your search path matching this regular expression will be synced.
    - Service `kinesis`: Optional. All files in your search path matching this regular expression will be synced.
    - Service `s3`: All files in your search path matching this regular expression will be synced.
    - Service `sftp`: All files in your search path matching this regular expression will be synced.
    - Service `share_point`: All files in your search path matching this regular expression will be synced.
    - Service `wasabi_cloud_storage`: All files in your search path matching this regular expression will be synced.
- `pdb_name` (String) Field usage depends on `service` value:
    - Service `oracle`: Optional: Required only for containerized database.
    - Service `oracle_ebs`: Optional: Required only for containerized database.
    - Service `oracle_hva`: (Multi-tenant databases only) The database's PDB name. Exclude this parameter for single-tenant databases.
    - Service `oracle_rac`: Optional: Required only for containerized database.
    - Service `oracle_rds`: Optional: Required only for containerized database.
    - Service `oracle_sap_hva`: (Multi-tenant databases only) The database's PDB name. Exclude this parameter for single-tenant databases.
- `pem_certificate` (String, Sensitive) Field usage depends on `service` value:
    - Service `apple_search_ads`: The contents of your PEM certificate file. Must be populated if `is_auth2_enabled` is set to `false`.
- `pem_private_key` (String, Sensitive) Field usage depends on `service` value:
    - Service `apple_search_ads`: The contents of your PEM secret key file. Must be populated if `is_auth2_enabled` is set to `true`.
- `per_interaction_dimensions` (Set of String) Field usage depends on `service` value:
    - Service `double_click_campaign_manager`: Per Interaction Dimensions.
- `personal_access_token` (String, Sensitive) Field usage depends on `service` value:
    - Service `cj_commission_detail`: Your CJ Commission Detail personal access token.
    - Service `databricks_db`: Access Token
    - Service `harvest`: Your Harvest Personal Access Token.
    - Service `productive`: Your Productive personal access token.
    - Service `totango`: Your Totango personal access token.
- `personal_api_token` (String, Sensitive) Field usage depends on `service` value:
    - Service `circleci`: Your CircleCI Personal API token.
    - Service `monday`: Your Monday.com Personal API Token.
- `pgp_pass_phrase` (String, Sensitive) Field usage depends on `service` value:
    - Service `azure_blob_storage`: The PGP passphrase used to create the key. Must be populated if `use_pgp_encryption_options` is set to `true`.
    - Service `ftp`: The PGP passphrase used to create the key. Must be populated if `use_pgp_encryption_options` is set to `true`.
    - Service `gcs`: The PGP passphrase used to create the key. Must be populated if `use_pgp_encryption_options` is set to `true`.
    - Service `s3`: The PGP passphrase used to create the key. Must be populated if `use_pgp_encryption_options` is set to `true`.
    - Service `sftp`: The PGP passphrase used to create the key. Must be populated if `use_pgp_encryption_options` is set to `true`.
    - Service `wasabi_cloud_storage`: The PGP passphrase used to create the key. Must be populated if `use_pgp_encryption_options` is set to `true`.
- `pgp_secret_key` (String, Sensitive) Field usage depends on `service` value:
    - Service `azure_blob_storage`: The contents of your PGP secret key file. Must be populated if `use_pgp_encryption_options` is set to `true`.
    - Service `ftp`: The contents of your PGP secret key file. Must be populated if `use_pgp_encryption_options` is set to `true`.
    - Service `gcs`: The contents of your PGP secret key file. Must be populated if `use_pgp_encryption_options` is set to `true`.
    - Service `s3`: The contents of your PGP secret key file. Must be populated if `use_pgp_encryption_options` is set to `true`.
    - Service `sftp`: The contents of your PGP secret key file. Must be populated if `use_pgp_encryption_options` is set to `true`.
    - Service `wasabi_cloud_storage`: The contents of your PGP secret key file. Must be populated if `use_pgp_encryption_options` is set to `true`.
- `phone_number` (String) Field usage depends on `service` value:
    - Service `itunes_connect`: Register the number on the Apple ID account page for 2FA
- `port` (Number) Field usage depends on `service` value:
    - Service `aurora`: The port number.
    - Service `aurora_postgres`: The port number.
    - Service `azure_postgres`: The port number.
    - Service `azure_sql_db`: The port number.
    - Service `azure_sql_managed_db`: The port number.
    - Service `clarity`: The port number.
    - Service `cockroachdb`: The port number.
    - Service `databricks_db`: The port of your SQL warehouse.
    - Service `db2i_hva`: The port number.
    - Service `db2i_sap_hva`: The port number.
    - Service `documentdb`: Port of the primary node. Ignored if `hosts` value is provided.
    - Service `dynamics_365_fo`: The port number.
    - Service `ehr`: The port number.
    - Service `elastic_cloud`: The port number.
    - Service `es_self_hosted`: The port number.
    - Service `ftp`: FTP port.
    - Service `google_cloud_mysql`: The port number.
    - Service `google_cloud_postgresql`: The port number.
    - Service `google_cloud_sqlserver`: The port number.
    - Service `hana_sap_hva_b1`: The port number.
    - Service `hana_sap_hva_ecc`: The port number.
    - Service `hana_sap_hva_ecc_netweaver`: The port number.
    - Service `hana_sap_hva_s4`: The port number.
    - Service `hana_sap_hva_s4_netweaver`: The port number.
    - Service `heroku_postgres`: The port number.
    - Service `jira`: The Jira service host port.
    - Service `magento_mysql`: The port number.
    - Service `magento_mysql_rds`: The port number.
    - Service `maria`: The port number.
    - Service `maria_azure`: The port number.
    - Service `maria_rds`: The port number.
    - Service `mysql`: The port number.
    - Service `mysql_azure`: The port number.
    - Service `mysql_rds`: The port number.
    - Service `netsuite_suiteanalytics`: The NetSuite service host port.
    - Service `opendistro`: The port number.
    - Service `opensearch`: The port number.
    - Service `oracle`: The port number.
    - Service `oracle_ebs`: The port number.
    - Service `oracle_hva`: The port number.
    - Service `oracle_rac`: The port number.
    - Service `oracle_rds`: The port number.
    - Service `oracle_sap_hva`: The port number.
    - Service `postgres`: The port number.
    - Service `postgres_rds`: The port number.
    - Service `redshift_db`: Port number
    - Service `sap_hana`: The SAP HANA port number.
    - Service `sap_s4hana`: The SAP S/4 port number.
    - Service `sftp`: SFTP port.
    - Service `snowflake_db`: The Snowflake optional port number.
    - Service `splunk`: The Splunk service host port.
    - Service `sql_server`: The port number.
    - Service `sql_server_hva`: The port number.
    - Service `sql_server_rds`: The port number.
    - Service `sql_server_sap_ecc_hva`: The port number.
- `post_click_attribution_window_size` (String) Field usage depends on `service` value:
    - Service `linkedin_ads`: The time period to attribute conversions based on clicks. Default value: `DAY_30`
- `prebuilt_report` (String) Field usage depends on `service` value:
    - Service `facebook`: The name of the report from which the connector will sync the data. [Possible prebuilt_report values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#prebuiltreport).
    - Service `google_analytics`: The name of the Prebuilt Report from which the connector will sync the data.
- `prefix` (String) Field usage depends on `service` value:
    - Service `aws_cloudtrail`: The prefix, if you used one when configuring the bucket.
    - Service `aws_cost_report`: Folder path to the aws_cost_report files within the bucket.
    - Service `aws_inventory`: The prefix if you used one when configuring the bucket.
    - Service `azure_blob_storage`: All files and folders under this folder path will be searched for files to sync.
    - Service `box`: All files and folders under this folder path will be searched for files to sync.
    - Service `cloudfront`: The name of the CloudFront folder in the bucket.
    - Service `dropbox`: All files and folders under this folder path will be searched for files to sync.
    - Service `ftp`: All files and folders under this folder path will be searched for files to sync.
    - Service `gcs`: All files and folders under this folder path will be searched for files to sync.
    - Service `google_drive`: All files and folders under this folder path will be searched for files to sync; this can be the entire drive link or a folder URL
    - Service `kinesis`: Folder path to the Kinesis files within the bucket.
    - Service `marin`: Folder path to the Marin manifest file.
    - Service `microsoft_dynamics_365_fo`: Folder name in which the exported dynamics 365 data resides.
    - Service `s3`: All files and folders under this folder path will be searched for files to sync.
    - Service `segment`: Folder path to the Segment files within the bucket. Must be populated if `sync_type` is set to `S3`.
    - Service `sftp`: All files and folders under this folder path will be searched for files to sync.
    - Service `share_point`: All files and folders under this folder path link will be searched for files to sync. This can be any shared folder link.
    - Service `wasabi_cloud_storage`: All files and folders under this folder path will be searched for files to sync.
- `primary_key` (String, Sensitive) Field usage depends on `service` value:
    - Service `care_quality_commission`: Your Care Quality Commission primary key.
- `primary_keys` (Set of String) Field usage depends on `service` value:
    - Service `workday`: Primary Keys
- `private_key` (String, Sensitive) Field usage depends on `service` value:
    - Service `absorb_lms`: Your Absorb LMS REST API private key.
    - Service `adobe_analytics`: The complete contents of your private key file including the start and end tags (`-----BEGIN PRIVATE KEY-----` to `-----END PRIVATE KEY-----`).
    - Service `adp_workforce_now`: Private Key.
    - Service `anaplan`: The contents of your private key file. Must be populated if `auth_mode` is set to `Certificate`.
    - Service `apple_search_ads`: The contents of your secret key file. Must be populated if `is_auth2_enabled` is set to `false`.
    - Service `braintree`: The contents of your secret key file.
    - Service `braintree_sandbox`: The contents of your secret key file.
    - Service `qualtrics`: Your private key
    - Service `snowflake_db`: Private access key. The field should be specified if authentication type is `KEY_PAIR`.
- `private_token` (String, Sensitive) Field usage depends on `service` value:
    - Service `eventbrite`: Your Eventbrite private token.
- `product` (String) Field usage depends on `service` value:
    - Service `webconnex`: Your Webconnex product.
- `profiles` (Set of String) Field usage depends on `service` value:
    - Service `amazon_ads`: Specific User Profile IDs to sync. Must be populated if `sync_mode` is set to `SpecificProfiles`.
    - Service `google_analytics`: Specific User Profile IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`.
    - Service `google_analytics_mcf`: Specific User Profile IDs to sync. Must be populated if `sync_mode` is set to `SPECIFIC_ACCOUNTS`.
- `project_access_token` (String, Sensitive) Field usage depends on `service` value:
    - Service `rollbar`: Your Rollbar project access token.
- `project_credentials` (Block Set) (see [below for nested schema](#nestedblock--config--project_credentials))
- `project_id` (String, Sensitive) Field usage depends on `service` value:
    - Service `bigquery_db`: BigQuery project ID
    - Service `google_analytics_360`: The project ID.
    - Service `google_analytics_4_export`: The Project ID.
    - Service `mixpanel`: Project ID
- `project_key` (String) Field usage depends on `service` value:
    - Service `commercetools`: Your commercetools project key.
- `projects` (Set of String) Field usage depends on `service` value:
    - Service `asana`: Specific Project IDs to sync. Must be populated if `syncMode` is set to `SpecificProjects`.
    - Service `jira`: Specific projects to sync. Must be populated if `syncMode` is set to `CUSTOM`.
- `properties` (Set of String) Field usage depends on `service` value:
    - Service `google_analytics_4`: The array of strings in the `properties/{id}` format where `id` is a Google Analytics 4 property identifier. Must be populated if `sync_mode` is set to `SPECIFIC_ACCOUNTS`.
- `property_id` (String) Field usage depends on `service` value:
    - Service `cloudbeds`: Your Cloudbeds Property IDs.
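For `properties`, the description above pins down both sides of the dependency: the set must be populated when `sync_mode` is `SPECIFIC_ACCOUNTS`, and each entry uses the `properties/{id}` format. A sketch for a `google_analytics_4` connector with a placeholder property ID:

```hcl
config {
  sync_mode  = "SPECIFIC_ACCOUNTS"
  properties = ["properties/123456789"] # placeholder GA4 property identifier
}
```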
-- `public_key` (String) Field usage depends on `service` value: - - Service `aurora`: Public Key - - Service `aurora_postgres`: Public Key - - Service `azure_blob_storage`: Public key generated by Fivetran to be copied into the host-machine's authorized keys file. - - Service `azure_cosmos_for_mongo`: Public Key - - Service `azure_postgres`: Public Key - - Service `azure_service_bus`: Public key generated by Fivetran to be copied into the host-machine's authorized keys file. - - Service `azure_sql_db`: Public Key. - - Service `azure_sql_managed_db`: Public Key. - - Service `braintree`: The contents of your PEM certificate file. - - Service `braintree_sandbox`: The contents of your PEM certificate file. - - Service `clarity`: Public Key. - - Service `cockroachdb`: Public Key - - Service `db2i_hva`: Public Key - - Service `db2i_sap_hva`: Public Key - - Service `documentdb`: Public Key - - Service `dynamics_365_fo`: Public Key. - - Service `ehr`: Public Key. - - Service `elastic_cloud`: Public Key - - Service `es_self_hosted`: Public Key - - Service `google_cloud_mysql`: Public Key - - Service `google_cloud_postgresql`: Public Key - - Service `google_cloud_sqlserver`: Public Key. - - Service `hana_sap_hva_b1`: Public Key - - Service `hana_sap_hva_ecc`: Public Key - - Service `hana_sap_hva_ecc_netweaver`: Public Key - - Service `hana_sap_hva_s4`: Public Key - - Service `hana_sap_hva_s4_netweaver`: Public Key - - Service `heroku_postgres`: Public Key - - Service `magento_mysql`: Public Key - - Service `magento_mysql_rds`: Public Key - - Service `maria`: Public Key - - Service `maria_azure`: Public Key - - Service `maria_rds`: Public Key - - Service `mongo`: Public Key - - Service `mongo_sharded`: Public Key - - Service `mysql`: Public Key - - Service `mysql_azure`: Public Key - - Service `mysql_rds`: Public Key - - Service `opendistro`: Public Key - - Service `opensearch`: Public Key - - Service `oracle`: Public Key - - Service `oracle_ebs`: Public Key - - Service `oracle_hva`: Public Key - - Service `oracle_rac`: Public Key - - Service `oracle_rds`: Public Key - - Service `oracle_sap_hva`: Public Key - - Service `partnerstack_vendor`: Your PartnerStack Vendor Public key. - - Service `postgres`: Public Key - - Service `postgres_rds`: Public Key - - Service `sap_hana`: Public Key - - Service `sap_s4hana`: Public Key - - Service `sftp`: Public Key - - Service `sql_server`: Public Key. - - Service `sql_server_hva`: Public Key. - - Service `sql_server_rds`: Public Key. - - Service `sql_server_sap_ecc_hva`: Public Key. -- `publication_name` (String) Field usage depends on `service` value: - - Service `aurora_postgres`: Publication name. Specify only for `"updated_method": "WAL_PGOUTPUT"`. - - Service `azure_postgres`: Publication name. Specify only for `"updated_method": "WAL_PGOUTPUT"`. - - Service `google_cloud_postgresql`: Publication name. Specify only for `"updated_method": "WAL_PGOUTPUT"`. - - Service `heroku_postgres`: Publication name. Specify only for `"updated_method": "WAL_PGOUTPUT"`. - - Service `postgres`: Publication name. Specify only for `"updated_method": "WAL_PGOUTPUT"`. - - Service `postgres_rds`: Publication name. Specify only for `"updated_method": "WAL_PGOUTPUT"`. -- `pull_archived_campaigns` (Boolean) Field usage depends on `service` value: - - Service `outbrain`: Include or ignore results from archived campaigns -- `query_id` (String) Field usage depends on `service` value: - - Service `google_display_and_video_360`: The ID of the query whose configuration you want to reuse. 
This is a required parameter when `config_method` is set to `REUSE_EXISTING`. -- `query_param_value` (String, Sensitive) Field usage depends on `service` value: - - Service `alchemer`: Your Alchemer API key. - - Service `birdeye`: Your Birdeye query-param-value. -- `quota_project_id` (String) Field usage depends on `service` value: - - Service `bigquery_db`: Specify a different project ID to account for quota and billing of the Fivetran query workload. -- `refresh_token` (String, Sensitive) Field usage depends on `service` value: - - Service `ironsource`: Your Ironsource `Client Secret`. -- `refresh_token_expires_at` (String) Field usage depends on `service` value: - - Service `pinterest_ads`: The expiration date of the refresh token. Unix timestamp in seconds. -- `region` (String) Field usage depends on `service` value: - - Service `algolia`: Your Algolia analytics region. - - Service `amazon_ads`: The region used by the Amazon Ads profile. - - Service `amazon_selling_partner`: The region used by the Amazon Selling Partner profile. - - Service `anaplan`: Your Anaplan account region. - - Service `atlassian_ops_genie`: Your company's Opsgenie region (usually **company**.opsgenie.com). - - Service `awin`: Your Awin Region. - - Service `aws_lambda`: The AWS region code for the DynamoDB instance. - - Service `concur`: The region. - - Service `cvent`: Your Cvent region. - - Service `exact_online`: Your Exact Online region. - - Service `getfeedback`: Your GetFeedback region. - - Service `happyfox`: Your HappyFox region. - - Service `keypay`: Your KeyPay region. - - Service `medallia_agile_research`: Your Medallia Agile region. - - Service `messagebird`: Your MessageBird Account region. - - Service `mixpanel`: Data Region. - - Service `navan`: Your Navan region. - - Service `on24`: Your ON24 region. - - Service `pendo`: The Pendo account region. - - Service `proofpoint_security_awareness`: Your Proofpoint Security Awareness Region. - - Service `ringover`: Your Ringover region. - - Service `samsara`: The region of your Samsara account. For instance, if your region is `EUROPE`, provide `eu.samsara` in the `Region` field. If your region is not in Europe, provide `samsara`. - - Service `snyk`: Your Snyk region. - - Service `talkdesk`: Your Talkdesk region (".com", ".eu", "ca.com"). - - Service `totango`: Your Totango region. - - Service `vonage_contact_center`: Your Vonage Contact Center region. - - Service `wasabi_cloud_storage`: The Wasabi Cloud Storage bucket region. Required for connector creation. Default value: `US_EAST_1`. - - Service `workday_strategic_sourcing`: Your Workday Strategic Sourcing Region. - - Service `zoho_books`: Your Zoho Books application host region. - - Service `zoho_campaigns`: Your Zoho Campaigns application host region. - - Service `zoho_desk`: Your Zoho Desk domain. - - Service `zoho_inventory`: Your Zoho Inventory application host region. -- `region_api_url` (String) Field usage depends on `service` value: - - Service `amazon_attribution`: Your Amazon Attribution API URL region. -- `region_auth_url` (String) Field usage depends on `service` value: - - Service `amazon_attribution`: Your Amazon Attribution auth URL region. -- `region_token_url` (String) Field usage depends on `service` value: - - Service `amazon_attribution`: Your Amazon Attribution token URL region. -- `region_url` (String) Field usage depends on `service` value: - - Service `playvox_workforce_management`: Your Playvox Workforce Management Region URL.
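The `region` values above are service-specific strings rather than cloud regions; the Samsara entry, for example, distinguishes EU and non-EU hosts. A sketch under that assumption (group ID and schema name are placeholders, credentials are omitted):

```hcl
resource "fivetran_connector" "samsara" {
  group_id = "my_group_id" # placeholder
  service  = "samsara"

  destination_schema {
    name = "samsara" # placeholder
  }

  config {
    region = "eu.samsara" # per the description above; use "samsara" outside Europe
    # service credentials omitted; the required fields depend on the service
  }
}
```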
-- `replica_id` (Number) Field usage depends on `service` value: - - Service `aurora`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group. - - Service `google_cloud_mysql`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group. - - Service `magento_mysql`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group. - - Service `magento_mysql_rds`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group. - - Service `maria`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group. - - Service `maria_azure`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group. - - Service `maria_rds`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group. - - Service `mysql`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group. - - Service `mysql_azure`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group. - - Service `mysql_rds`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group. -- `replication_slot` (String) Field usage depends on `service` value: - - Service `aurora_postgres`: Replication slot name. Specify only for `"updated_method": "WAL"` or `"WAL_PGOUTPUT"`. - - Service `azure_postgres`: Replication slot name. Specify only for `"updated_method": "WAL"` or `"WAL_PGOUTPUT"`. - - Service `google_cloud_postgresql`: Replication slot name. Specify only for `"updated_method": "WAL"` or `"WAL_PGOUTPUT"`. - - Service `heroku_postgres`: Replication slot name. Specify only for `"updated_method": "WAL"` or `"WAL_PGOUTPUT"`. - - Service `postgres`: Replication slot name. Specify only for `"updated_method": "WAL"` or `"WAL_PGOUTPUT"`. - - Service `postgres_rds`: Replication slot name. Specify only for `"updated_method": "WAL"` or `"WAL_PGOUTPUT"`. -- `report_configs` (Block Set) (see [below for nested schema](#nestedblock--config--report_configs)) -- `report_configuration_ids` (Set of String) Field usage depends on `service` value: - - Service `double_click_campaign_manager`: You can select only one Floodlight Configuration ID per account. -- `report_format_type` (String) Field usage depends on `service` value: - - Service `workday`: Selects the report format, either JSON or CSV. By default, the report format is JSON. -- `report_keys` (String) Field usage depends on `service` value: - - Service `rakutenadvertising`: Your Rakuten Advertising report keys. -- `report_list` (Block Set) (see [below for nested schema](#nestedblock--config--report_list)) -- `report_suites` (Set of String) -- `report_timezone` (String) Field usage depends on `service` value: - - Service `criteo`: Report Timezone. -- `report_type` (String) Field usage depends on `service` value: - - Service `adroll`: The report type you want. Default value: `ALL_ADS`. - - Service `double_click_campaign_manager`: Type of reporting data to sync. Default value: `STANDARD`.
- - Service `google_display_and_video_360`: The type of the report to create. This is a required parameter when `config_method` is set to `CREATE_NEW`. - - Service `youtube_analytics`: The name of the report from which the connector will sync the data. -- `report_url` (String) Field usage depends on `service` value: - - Service `workday`: URL for a live custom report. -- `reports` (Block Set) (see [below for nested schema](#nestedblock--config--reports)) -- `reports_linkedin_ads` (Set of String) Field usage depends on `service` value: - - Service `linkedin_ads`: Specific analytics reports to sync. Must be populated if `adAnalytics` is set to `SpecificReports`. -- `repositories` (Set of String) Field usage depends on `service` value: - - Service `github`: Specific Repository IDs to sync. Must be populated if `syncMode` is set to `SpecificRepositories`. -- `resource_token` (String, Sensitive) Field usage depends on `service` value: - - Service `cosmos`: A token that provides access to a specific Azure Cosmos DB resource. Required for the `RESOURCE_TOKEN` data access method. -- `resource_url` (String) Field usage depends on `service` value: - - Service `dynamics_365`: URL at which Dynamics 365 is accessed. -- `rest_api_limit` (Number) Field usage depends on `service` value: - - Service `pardot`: The number of API calls that the connector should not exceed in a day. Default REST API call limit per day: 150,000. -- `rfc_library_path` (String) Field usage depends on `service` value: - - Service `hana_sap_hva_ecc_netweaver`: Directory path containing the SAP NetWeaver RFC SDK library files. - - Service `hana_sap_hva_s4_netweaver`: Directory path containing the SAP NetWeaver RFC SDK library files. - - Service `oracle_sap_hva_netweaver`: Directory path containing the SAP NetWeaver RFC SDK library files. -- `role` (String) Field usage depends on `service` value: - - Service `netsuite_suiteanalytics`: The NetSuite Role ID for connection. - - Service `snowflake_db`: Snowflake Connector role name. -- `role_arn` (String, Sensitive) Field usage depends on `service` value: - - Service `appsflyer`: S3 Role ARN - - Service `aws_cloudtrail`: The Role ARN required for authentication. - - Service `aws_cost`: The Role ARN required for authentication. - - Service `aws_cost_report`: The Role ARN required for authentication. - - Service `aws_inventory`: The Role ARN required for authentication. - - Service `aws_lambda`: The Role ARN required for authentication. - - Service `aws_msk`: If `sasl_mechanism` is set to `IAM`, enter your Role ARN. - - Service `cloudfront`: The Role ARN required for authentication. - - Service `dynamodb`: Role ARN - - Service `heap`: The Role ARN required for authentication. - - Service `kinesis`: The Role ARN required for authentication. - - Service `s3`: The Role ARN required for authentication. Required for connector creation when syncing using a private bucket. - - Service `segment`: The Role ARN required for authentication. Must be populated if `sync_type` is set to `S3`. -- `rollback_window` (Number) Field usage depends on `service` value: - - Service `appsflyer`: Rollback window -- `rollback_window_size` (Number) Field usage depends on `service` value: - - Service `bingads`: A period of time in days during which a conversion is recorded. -- `s3_bucket` (String) Field usage depends on `service` value: - - Service `webhooks`: The S3 bucket name. Required if `bucket_service` is set to `S3`.
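For the PostgreSQL-family services, `publication_name` and `replication_slot` (both described above) pair with the `"updated_method": "WAL_PGOUTPUT"` setting quoted in their descriptions. A rough sketch with placeholder connection details; the field names follow the descriptions above, and the destination schema prefix is hypothetical:

```hcl
resource "fivetran_connector" "postgres" {
  group_id = "my_group_id" # placeholder
  service  = "postgres"

  destination_schema {
    prefix = "pg" # hypothetical destination schema prefix
  }

  config {
    host     = "db.example.com" # placeholder connection details
    port     = 5432
    user     = "fivetran"
    password = "..."
    database = "app"

    updated_method   = "WAL_PGOUTPUT"  # as quoted in the descriptions above
    publication_name = "fivetran_pub"  # hypothetical publication
    replication_slot = "fivetran_slot" # hypothetical slot
  }
}
```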
-- `s3_export_bucket` (String) Field usage depends on `service` value: - - Service `braze`: Your S3 user export bucket. Required if `AWS_S3` is the `export_storage_type`. -- `s3_export_external_id` (String) Field usage depends on `service` value: - - Service `braze`: This is the same as your `group_id`, used if `export_storage_type` is `AWS_S3`. -- `s3_export_folder` (String) Field usage depends on `service` value: - - Service `braze`: Your S3 user export folder name. Required if `AWS_S3` is the `export_storage_type`. -- `s3_export_role_arn` (String, Sensitive) Field usage depends on `service` value: - - Service `braze`: The Role ARN required for authentication. Required if `AWS_S3` is the `export_storage_type`. -- `s3_role_arn` (String, Sensitive) Field usage depends on `service` value: - - Service `adjust`: The Role ARN required for authentication. Used if the `export_storage_type` is `AWS_S3`. - - Service `webhooks`: The Role ARN required for authentication. Required if `bucket_service` is set to `S3`. -- `s3bucket` (String) Field usage depends on `service` value: - - Service `adobe_analytics_data_feed`: The S3 bucket name. - - Service `braze`: Your S3 bucket. Required if `AWS_S3` is the `cloud_storage_type`. - - Service `sailthru`: Name of the bucket configured to receive Sailthru Connect data. -- `s3external_id` (String) Field usage depends on `service` value: - - Service `braze`: This is the same as your `group_id`, used for authentication along with the `role_arn`. Required if `AWS_S3` is the `cloud_storage_type`. - - Service `sailthru`: The external ID is a string that designates who can assume the role. -- `s3folder` (String) Field usage depends on `service` value: - - Service `braze`: Your S3 folder name. Required if `AWS_S3` is the `cloud_storage_type`. -- `s3path` (String) Field usage depends on `service` value: - - Service `sailthru`: Copy and use this to configure Sailthru Connect in your Sailthru account. -- `s3role_arn` (String, Sensitive) Field usage depends on `service` value: - - Service `adobe_analytics_data_feed`: The Role ARN required for authentication. - - Service `braze`: The Role ARN required for authentication. Required if `AWS_S3` is the `cloud_storage_type`. - - Service `sailthru`: Role ARN of the IAM role created for Fivetran. -- `sales_account_sync_mode` (String) Field usage depends on `service` value: - - Service `itunes_connect`: Whether to sync all sales accounts or specific sales accounts. -- `sales_accounts` (Set of String) Field usage depends on `service` value: - - Service `itunes_connect`: Specific sales accounts to sync. Must be populated if `sales_account_sync_mode` is set to `SpecificSalesAccounts`. -- `salesforce_security_token` (String, Sensitive) Field usage depends on `service` value: - - Service `pardot`: The Pardot user's Salesforce SSO Account Security Token. -- `sandbox_account` (String) Field usage depends on `service` value: - - Service `gocardless`: Your GoCardless account type. -- `sap_schema` (String) Field usage depends on `service` value: - - Service `db2i_hva`: The SAP schema. - - Service `db2i_sap_hva`: SAP schema name. - - Service `sql_server_sap_ecc_hva`: SAP Schema Name. -- `sap_source_schema` (String) Field usage depends on `service` value: - - Service `hana_sap_hva_b1`: The schema name where the HANA tables reside. - - Service `hana_sap_hva_ecc`: The Hana schema name where the SAP tables reside. - - Service `hana_sap_hva_ecc_netweaver`: The Hana schema name where the SAP tables reside.
- - Service `hana_sap_hva_s4`: The Hana schema name where the SAP tables reside. - - Service `hana_sap_hva_s4_netweaver`: The Hana schema name where the SAP tables reside. -- `sap_user` (String) Field usage depends on `service` value: - - Service `oracle_sap_hva`: The Oracle schema name where the SAP tables reside. -- `sasl_mechanism` (String) Field usage depends on `service` value: - - Service `apache_kafka`: SASL Mechanism - - Service `aws_msk`: If `security_protocol` is set to `SASL`, enter the SASL Mechanism. -- `sasl_plain_key` (String, Sensitive) Field usage depends on `service` value: - - Service `apache_kafka`: API Key -- `sasl_plain_secret` (String, Sensitive) Field usage depends on `service` value: - - Service `apache_kafka`: API Secret -- `sasl_scram256_key` (String, Sensitive) Field usage depends on `service` value: - - Service `apache_kafka`: API Key -- `sasl_scram256_secret` (String, Sensitive) Field usage depends on `service` value: - - Service `apache_kafka`: API Secret -- `sasl_scram512_key` (String, Sensitive) Field usage depends on `service` value: - - Service `apache_kafka`: API Key - - Service `aws_msk`: If `sasl_mechanism` is set to `SCRAM_SHA_512`, enter your secret's `saslScram512Key`. -- `sasl_scram512_secret` (String, Sensitive) Field usage depends on `service` value: - - Service `apache_kafka`: API Secret - - Service `aws_msk`: If `sasl_mechanism` is set to `SCRAM_SHA_512`, enter your secret's `saslScram512Secret`. -- `schema_registry_credentials_source` (String) Field usage depends on `service` value: - - Service `apache_kafka`: Schema Registry Credentials source - - Service `aws_msk`: Schema Registry Credentials source - - Service `confluent_cloud`: Schema Registry Credentials source -- `schema_registry_key` (String, Sensitive) Field usage depends on `service` value: - - Service `apache_kafka`: Schema Registry Key - - Service `aws_msk`: Schema Registry Key - - Service `azure_service_bus`: The key used to access the schema registry. Required for the `avro` and `protobuf` message types. - - Service `confluent_cloud`: Schema Registry Key -- `schema_registry_secret` (String, Sensitive) Field usage depends on `service` value: - - Service `apache_kafka`: Schema Registry Secret - - Service `aws_msk`: Schema Registry Secret - - Service `azure_service_bus`: The secret used to access the schema registry. Required for the `avro` and `protobuf` message types. - - Service `confluent_cloud`: Schema Registry Secret -- `schema_registry_urls` (Set of String) Field usage depends on `service` value: - - Service `apache_kafka`: Your schema registry URLs. - - Service `aws_msk`: Your schema registry URLs. - - Service `azure_service_bus`: The comma-separated list of schema registry servers in the `server:port` format. - - Service `confluent_cloud`: Your schema registry URLs. -- `scope` (String) Field usage depends on `service` value: - - Service `ebay`: Your eBay scopes. -- `seats` (Set of String) Field usage depends on `service` value: - - Service `yahoo_dsp`: Specific Seats to sync. Must be populated if `sync_mode_seat` is set to `SPECIFIC_SEATS`. -- `secret` (String, Sensitive) Field usage depends on `service` value: - - Service `appcues`: Your Appcues Secret. - - Service `buildium`: Your Buildium API secret. - - Service `loopio`: Your Loopio Secret. - - Service `mode`: Your Mode Secret. - - Service `playvox_workforce_management`: Your Playvox Workforce Management Secret. - - Service `twilio`: The Twilio API secret - - Service `uservoice`: The UserVoice API secret.
- - Service `vts`: Your VTS secret. -- `secret_key` (String, Sensitive) Field usage depends on `service` value: - - Service `afterpay`: Your Afterpay Secret Key. - - Service `appsflyer`: Your AWS secret key. - - Service `bigquery_db`: Private key of the customer service account. If specified, your service account will be used to process the data instead of the Fivetran-managed service account. - - Service `checkr`: Your Checkr secret key. - - Service `ezofficeinventory`: Your EZOfficeInventory API secret key. - - Service `gcs`: Your JSON Private Key. Used to authorize service account. Required if you use a Custom Service Account to authenticate the storage bucket. - - Service `ironsource`: Your Ironsource `Client ID`. - - Service `partnerstack_vendor`: Your PartnerStack Vendor Secret key. - - Service `paypal`: `Client Secret` of your PayPal client application. - - Service `paypal_sandbox`: `Client Secret` of your PayPal client application. - - Service `retailnext`: Your RetailNext secret key. - - Service `statsig`: Your Statsig secret key. - - Service `yotpo`: Your Yotpo Secret key -- `secrets` (String, Sensitive) Field usage depends on `service` value: - - Service `aws_lambda`: The secrets that should be passed to the function at runtime. - - Service `azure_function`: The secrets that should be passed to the function at runtime. - - Service `google_cloud_function`: The secrets that should be passed to the function at runtime. -- `secrets_list` (Block Set) (see [below for nested schema](#nestedblock--config--secrets_list)) -- `security_protocol` (String) Field usage depends on `service` value: - - Service `apache_kafka`: Security protocol for Kafka interaction. - - Service `aws_msk`: The security protocol for Kafka interaction. - - Service `confluent_cloud`: Security protocol for Confluent Cloud interaction. - - Service `heroku_kafka`: Security protocol for Heroku Kafka interaction. -- `segments` (Set of String) Field usage depends on `service` value: - - Service `google_analytics`: A segment is a subset of your Analytics data that is made up of one or more non-destructive filters (filters that do not alter the underlying data). Those filters isolate subsets of users, sessions, and hits. -- `selected_event_types` (Set of String) Field usage depends on `service` value: - - Service `salesforce_marketing_cloud`: Select the event types to be synced. -- `selected_exports` (Set of String) Field usage depends on `service` value: - - Service `anaplan`: The list of export IDs in the format `workspace_id_model_id_export_id` that the connector will sync. Must be populated if `sync_mode` is set to `SpecificExports`. -- `sender_id` (String) Field usage depends on `service` value: - - Service `sage_intacct`: Your Sender ID -- `sender_password` (String, Sensitive) Field usage depends on `service` value: - - Service `sage_intacct`: Your Sender Password -- `server` (String) Field usage depends on `service` value: - - Service `castor_edc`: Your Castor EDC Server. -- `server_address` (String) Field usage depends on `service` value: - - Service `tableau_source`: Your Tableau Source server address. -- `server_url` (String) Field usage depends on `service` value: - - Service `oracle_business_intelligence_publisher`: The Oracle Business Intelligence Instance URL. - - Service `oracle_fusion_cloud_apps_crm`: The Oracle Fusion Cloud Instance URL. - - Service `oracle_fusion_cloud_apps_fscm`: The Oracle Fusion Cloud Instance URL. - - Service `oracle_fusion_cloud_apps_hcm`: The Oracle Fusion Cloud Instance URL. 
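The Kafka-related attributes work as a family: `security_protocol` selects the mechanism, and the matching `sasl_*` credentials (documented earlier) plus the `servers` list (documented below) must then be supplied. A sketch assuming the `SASL`/`PLAIN` combination for `apache_kafka`; brokers and credentials are placeholders:

```hcl
resource "fivetran_connector" "kafka" {
  group_id = "my_group_id" # placeholder
  service  = "apache_kafka"

  destination_schema {
    name = "kafka" # placeholder
  }

  config {
    servers           = ["broker-1.example.com:9092"] # placeholder broker list
    security_protocol = "SASL"
    sasl_mechanism    = "PLAIN"
    sasl_plain_key    = "api-key"    # hypothetical API key
    sasl_plain_secret = "api-secret" # hypothetical API secret
  }
}
```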
-- `server_variable` (String) Field usage depends on `service` value: - - Service `myosh`: Your myosh server variable. -- `servers` (Set of String) Field usage depends on `service` value: - - Service `apache_kafka`: Comma-separated list of Kafka servers in the format `server:port`. - - Service `aws_msk`: Comma-separated list of Kafka servers in the `server:port` format. - - Service `confluent_cloud`: Comma-separated list of Confluent Cloud servers in the format `server:port`. - - Service `heroku_kafka`: Comma-separated list of Heroku Kafka servers in the format `server:port`. -- `service_account` (String) Field usage depends on `service` value: - - Service `google_drive`: Share the folder with the email address. -- `service_account_email` (String) Field usage depends on `service` value: - - Service `google_cloud_function`: Provide Invoker role to this service account. -- `service_account_key` (String, Sensitive) Field usage depends on `service` value: - - Service `firebase`: The contents of your service account key file. Required for authentication. -- `service_account_secret` (String, Sensitive) Field usage depends on `service` value: - - Service `mixpanel`: Service Account Secret -- `service_account_username` (String) Field usage depends on `service` value: - - Service `mixpanel`: Service Account Username -- `service_authentication` (String, Sensitive) Field usage depends on `service` value: - - Service `dsv`: A Base64-encoded variant of your `username:password` string. Required for authentication. -- `service_name` (String) Field usage depends on `service` value: - - Service `hana_sap_hva_ecc_netweaver`: Unique identifier sapsid of the SAP system. This field is displayed only when the REMOTE SERVICE IDENTIFICATION is set to Service. - - Service `hana_sap_hva_s4_netweaver`: Unique identifier sapsid of the SAP system. This field is displayed only when the REMOTE SERVICE IDENTIFICATION is set to Service. - - Service `walmart_marketplace`: Your Walmart Marketplace service name. -- `sftp_host` (String) Field usage depends on `service` value: - - Service `adobe_analytics_data_feed`: SFTP host. - - Service `salesforce_marketing_cloud`: Host -- `sftp_is_key_pair` (Boolean) Field usage depends on `service` value: - - Service `adobe_analytics_data_feed`: Log in with a key pair or password. - - Service `salesforce_marketing_cloud`: Set this field if you use a key pair for logging into your SFTP server. Don't set it if you use a username and password. -- `sftp_password` (String, Sensitive) Field usage depends on `service` value: - - Service `adobe_analytics_data_feed`: SFTP password. Required if `sftp_is_key_pair` is set to `false`. - - Service `salesforce_marketing_cloud`: Password -- `sftp_port` (Number) Field usage depends on `service` value: - - Service `adobe_analytics_data_feed`: SFTP port. - - Service `salesforce_marketing_cloud`: Port -- `sftp_public_key` (String) Field usage depends on `service` value: - - Service `adobe_analytics_data_feed`: Public Key - - Service `salesforce_marketing_cloud`: Public Key -- `sftp_user` (String) Field usage depends on `service` value: - - Service `adobe_analytics_data_feed`: SFTP user. - - Service `salesforce_marketing_cloud`: User -- `share_url` (String) Field usage depends on `service` value: - - Service `share_point`: Your SharePoint folder URL. You can find the folder URL by following the steps mentioned [here](https://fivetran.com/docs/connectors/files/share-point/setup-guide).
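The `sftp_*` attributes are likewise paired: `sftp_is_key_pair` decides whether `sftp_password` is required. A sketch for `salesforce_marketing_cloud` with password login; the host and credentials are placeholders:

```hcl
resource "fivetran_connector" "sfmc" {
  group_id = "my_group_id" # placeholder
  service  = "salesforce_marketing_cloud"

  destination_schema {
    name = "sfmc" # placeholder
  }

  config {
    sftp_host        = "sftp.example.com" # placeholder host
    sftp_port        = 22
    sftp_user        = "export_user" # placeholder user
    sftp_is_key_pair = false         # password login, so sftp_password is required
    sftp_password    = "..."         # placeholder secret
  }
}
```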
-- `sheet_id` (String) Field usage depends on `service` value: - - Service `google_sheets`: The URL of the sheet that can be copied from the browser address bar, or the ID of the sheet that can be found in the sheet's URL between **/d/** and **/edit**. -- `shop` (String) Field usage depends on `service` value: - - Service `shopify`: The Shopify shop name. Can be found in the URL before **.myshopify.com**. -- `short_code` (String, Sensitive) Field usage depends on `service` value: - - Service `salesforce_commerce_cloud`: The Salesforce eight-character string assigned to a realm for routing purposes. -- `should_sync_events_with_deleted_profiles` (Boolean) Field usage depends on `service` value: - - Service `klaviyo`: Sync events linked to deleted profiles -- `show_records_with_no_metrics` (Boolean) Field usage depends on `service` value: - - Service `apple_search_ads`: Turn the toggle on if you want the reports to also return records without metrics. -- `sid` (String) Field usage depends on `service` value: - - Service `twilio`: The Twilio API key SID -- `signer_public_key` (String) Field usage depends on `service` value: - - Service `azure_blob_storage`: The contents of the signer's public key file. Must be populated if `use_pgp_encryption_options` is set to `true` and PGP encrypted files are signed. - - Service `ftp`: The contents of the signer's public key file. Must be populated if `use_pgp_encryption_options` is set to `true` and PGP encrypted files are signed. - - Service `gcs`: The contents of the signer's public key file. Must be populated if `use_pgp_encryption_options` is set to `true` and PGP encrypted files are signed. - - Service `s3`: The contents of the signer's public key file. Must be populated if `use_pgp_encryption_options` is set to `true` and PGP encrypted files are signed. - - Service `sftp`: The contents of the signer's public key file. Must be populated if `use_pgp_encryption_options` is set to `true` and PGP encrypted files are signed. - - Service `wasabi_cloud_storage`: The contents of the signer's public key file. Must be populated if `use_pgp_encryption_options` is set to `true` and PGP encrypted files are signed. -- `site_address` (String) Field usage depends on `service` value: - - Service `teamwork`: Your Teamwork site address. -- `site_id` (String) Field usage depends on `service` value: - - Service `microsoft_lists`: The Site ID of the SharePoint site from which you want to sync your lists. The Site ID is the `id` field in the [Graph API](https://docs.microsoft.com/en-us/graph/api/site-search?view=graph-rest-1.0&tabs=http) response for sites. - - Service `salesforce_commerce_cloud`: The name of the site from which you want to sync data. -- `site_name` (String) Field usage depends on `service` value: - - Service `microsoft_lists`: The Name of the SharePoint site. The Site Name is the `name` field in the Graph API response for sites. - - Service `tableau_source`: Your Tableau Source site name. -- `site_urls` (Set of String) Field usage depends on `service` value: - - Service `google_search_console`: Specific Site URLs to sync. Must be populated if `sync_mode` is set to `SpecificSites`. -- `skip_after` (Number) Field usage depends on `service` value: - - Service `aws_cost_report`: Enter 1 or greater - - Service `azure_blob_storage`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. 
- - Service `box`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. - - Service `dropbox`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. - - Service `email`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. - - Service `ftp`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. - - Service `gcs`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. - - Service `google_drive`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. - - Service `kinesis`: Enter 1 or greater - - Service `s3`: We will skip over the number of lines specified at the end to avoid introducing aberrant data into your destination. - - Service `sftp`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. - - Service `share_point`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. - - Service `wasabi_cloud_storage`: We will skip over the number of lines specified at the end to avoid introducing aberrant data into your destination. -- `skip_before` (Number) Field usage depends on `service` value: - - Service `aws_cost_report`: Enter 1 or greater - - Service `azure_blob_storage`: We will skip over the number of lines specified before syncing data. - - Service `box`: We will skip over the number of lines specified before syncing data. - - Service `dropbox`: We will skip over the number of lines specified before syncing data. - - Service `email`: We will skip over the number of lines specified before syncing data. - - Service `ftp`: We will skip over the number of lines specified before syncing data. - - Service `gcs`: We will skip over the number of lines specified before syncing data. - - Service `google_drive`: We will skip over the number of lines specified before syncing data. - - Service `kinesis`: Enter 1 or greater - - Service `s3`: We will skip over the number of lines specified before syncing data. - - Service `sftp`: We will skip over the number of lines specified before syncing data. - - Service `share_point`: We will skip over the number of lines specified before syncing data. - - Service `wasabi_cloud_storage`: We will skip over the number of lines specified before syncing data. -- `skip_empty_reports` (Boolean) Field usage depends on `service` value: - - Service `google_ads`: Toggles the ["Skip empty reports"](https://fivetran.com/docs/connectors/applications/google-ads#skipemptyreports) feature. Enabled by default -- `snc_certificate` (String, Sensitive) -- `snc_certificate_source` (String, Sensitive) -- `snc_fivetran_name` (String) -- `snc_library_path` (String) Field usage depends on `service` value: - - Service `hana_sap_hva_ecc_netweaver`: Path to the external security product's library. - - Service `hana_sap_hva_s4_netweaver`: Path to the external security product's library. -- `snc_mode` (String) -- `snc_my_name` (String) -- `snc_name` (String) Field usage depends on `service` value: - - Service `hana_sap_hva_ecc_netweaver`: Client SNC name. - - Service `hana_sap_hva_s4_netweaver`: Client SNC name. 
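`skip_before` and `skip_after` trim a fixed number of lines from the start and end of each file. For an `s3` connector that ignores one header line and two trailing summary lines, the config would look roughly like this; the bucket, prefix, and role ARN are placeholders:

```hcl
resource "fivetran_connector" "s3" {
  group_id = "my_group_id" # placeholder
  service  = "s3"

  destination_schema {
    name = "s3_files" # placeholder
  }

  config {
    bucket      = "my-data-bucket"                         # placeholder bucket
    prefix      = "exports/"                               # only files under this path are synced
    role_arn    = "arn:aws:iam::123456789012:role/example" # hypothetical role ARN
    skip_before = 1 # header lines skipped at the top of each file
    skip_after  = 2 # summary lines skipped at the end of each file
  }
}
```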
-- `snc_partner_name` (String) Field usage depends on `service` value: - - Service `hana_sap_hva_ecc_netweaver`: Communication partner's SNC name. - - Service `hana_sap_hva_s4_netweaver`: Communication partner's SNC name. -- `snc_source_name` (String) -- `soap_uri` (String) Field usage depends on `service` value: - - Service `marketo`: Marketo SOAP API Endpoint. -- `social_data_sync_timeframe` (String) Field usage depends on `service` value: - - Service `linkedin_company_pages`: The social data (UGCPosts, Shares, Comments) sync time frame in months. Default value: `SIX` . -- `source` (String) Field usage depends on `service` value: - - Service `adobe_analytics_data_feed`: The data source. -- `store_hash` (String) Field usage depends on `service` value: - - Service `big_commerce`: The BigCommerce store hash. -- `store_id` (String) Field usage depends on `service` value: - - Service `reviewsio`: Your REVIEWS.io store ID -- `sub_domain` (String) Field usage depends on `service` value: - - Service `absorb_lms`: Your Absorb LMS subdomain. - - Service `activecampaign`: Your ActiveCampaign sub-domain. - - Service `acumatica`: Your Acumatica subdomain. - - Service `ada`: Your Ada sub-domain. - - Service `alchemer`: Your Alchemer sub-domain. - - Service `atlassian_jira_align`: Your Jira Align base URL. - - Service `azure_boards`: Your Azure Boards Organization Name. - - Service `azure_devops`: Your Azure Organization Name - - Service `betterworks`: Your Betterworks subdomain. - - Service `bubble`: Your Bubble subdomain. - - Service `buildium`: Your Buildium subdomain. - - Service `canvas_by_instructure`: Your Canvas by Instructure domain. - - Service `chargebee_product_catalog_1`: Your Chargebee Product Catalog 1 subdomain. - - Service `chargebee_product_catalog_2`: Your Chargebee subdomain. - - Service `checkr`: Your Checkr subdomain. - - Service `clubspeed`: Your Clubspeed subdomain. - - Service `collibra`: Your collibra subdomain. - - Service `concord`: Your Concord Sub Domain. - - Service `contrast_security`: Your Contrast Security subdomain. - - Service `customerio`: Your Customer.io region-specific Subdomain. - - Service `dbt_cloud`: Your dbt Cloud API server region. - - Service `deputy`: Your Deputy subdomain. - - Service `docebo`: Your Docebo subdomain. - - Service `drata`: Your Drata sub_domain. - - Service `eventsforce`: Your Eventsforce subdomain. - - Service `ezofficeinventory`: Your EZOfficeInventory Subdomain. - - Service `fountain`: Your Fountain subdomain. - - Service `freshchat`: Your Freshchat Sub Domain - - Service `gainsight_customer_success`: The subdomain of your Gainsight account. - - Service `gainsight_product_experience`: Your Gainsight Product Experience subdomain. - - Service `genesys`: Your Genesys subdomain. - - Service `green_power_monitor`: Your GreenPowerMonitor subdomain. - - Service `infobip`: Your Infobip sub_domain. - - Service `insightly`: Your company's Insightly subdomain name. - - Service `instructure`: The Sub domain in which your Instructure account is hosted. - - Service `jamf`: Your Jamf subdomain. - - Service `kandji`: Your Kandji Subdomain. - - Service `khoros_care`: Your Khoros Care subDomain. - - Service `looker_source`: Your looker SubDomain name. - - Service `mailgun`: Your Mailgun subdomain. - - Service `maxio_chargify`: Enter Your Subdomain. - - Service `myosh`: Your myosh subdomain. - - Service `namely`: Your Namely subdomain. - - Service `nylas`: Your Nylas subdomain. - - Service `okta`: Your Okta subdomain. 
- - Service `picqer`: Your Picqer subdomain. - - Service `pinpoint`: Your Pinpoint sub domain name. - - Service `piwik_pro`: Your Piwik PRO subdomain. - - Service `playvox`: Your Playvox Subdomain. - - Service `posthog`: Your PostHog data region (`app` or `eu`). - - Service `recurly`: Your company's Recurly subdomain. - - Service `reltio`: Your Reltio subdomain. - - Service `revel`: Your Revel Systems subDomain. - - Service `rundeck`: Your Rundeck subdomain. - - Service `sage_hr`: Your Sage HR subdomain. - - Service `salesforce_marketing_cloud`: Your Salesforce Marketing Cloud subdomain. - - Service `salsify`: Your Salsify Organization ID. - - Service `sap_success_factors`: Your SAP SuccessFactors Subdomain. - - Service `sonarqube`: Your Sonarqube subdomain. - - Service `starrez`: Your StarRez subdomain - - Service `tableau_source`: Your Tableau Source subdomain. - - Service `tempo`: Your Tempo subdomain. - - Service `testrail`: Your TestRail subdomain. - - Service `thinkific`: Your Thinkific subdomain. - - Service `totango`: Your Totango Subdomain. - - Service `tymeshift`: Your Tymeshift subdomain. - - Service `upland`: Your Upland Software subDomain. - - Service `wordpress`: Your WordPress subdomain. - - Service `workable`: Your Workable Subdomain. - - Service `wrike`: Your Wrike Subdomain. -- `subdomain` (String) Field usage depends on `service` value: - - Service `bamboohr`: The subdomain used to access your account. If you access BambooHR at 'https://mycompany.bamboohr.com', then the subdomain is 'mycompany'. - - Service `datadog`: Your Datadog subdomain. - - Service `ebay`: Your eBay environment. - - Service `freshdesk`: Your company's freshdesk subdomain (usually **company**.freshdesk.com). - - Service `freshsales`: Your Freshsales domain. - - Service `freshservice`: Your company's freshservice subdomain (usually **company**.freshservice.com). - - Service `freshsuccess`: Your Freshsuccess subdomain. - - Service `gorgias`: Your Gorgias subdomain. - - Service `jama_software`: Your Jama Software subdomain. - - Service `klarna`: Your Klarna subdomain. - - Service `learnupon`: Your Learnupon subdomain. - - Service `maxio_saasoptics`: Your Maxio SaaSOptics subdomain. - - Service `medallia`: Medallia subdomain - - Service `skillstx`: Your SkillsTX subdomain. - - Service `smarthr`: Your SmartHR subdomain. - - Service `sonarqube`: Your Sonarqube subdomain. - - Service `toast`: Your Toast domain. - - Service `vts`: Your VTS Subdomain. - - Service `zendesk_chat`: Your Zendesk domain. -- `subscription` (String) Field usage depends on `service` value: - - Service `retailnext`: Your RetailNext subscription. -- `subscription_key` (String, Sensitive) Field usage depends on `service` value: - - Service `dsv`: Your DSV subscription key. -- `support_connected_accounts_sync` (Boolean) Field usage depends on `service` value: - - Service `stripe`: Sync Connected Accounts. Connected Account Documentation - https://stripe.com/docs/api/connected_accounts. - - Service `stripe_test`: Sync Connected Accounts. Connected Account Documentation - https://stripe.com/docs/api/connected_accounts. -- `support_nested_columns` (Boolean) Field usage depends on `service` value: - - Service `workday`: This option is to unpack the nested columns and sync them separately. By default, we sync the nested columns as JSON objects. -- `survey_ids` (String) Field usage depends on `service` value: - - Service `qualaroo`: Array of Qualaroo Survey IDs. 
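The `subdomain` convention is easiest to see with the BambooHR entry above: for an account at `https://mycompany.bamboohr.com`, only the first label is supplied. A sketch; the group ID and schema name are placeholders, and API credentials are omitted:

```hcl
resource "fivetran_connector" "bamboohr" {
  group_id = "my_group_id" # placeholder
  service  = "bamboohr"

  destination_schema {
    name = "bamboohr" # placeholder
  }

  config {
    subdomain = "mycompany" # from https://mycompany.bamboohr.com
    # API credentials omitted; the required fields depend on the service
  }
}
```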
-- `swipe_attribution_window` (String) Field usage depends on `service` value: - - Service `snapchat_ads`: The time period to attribute conversions based on swipes. Default value: `DAY_28`. -- `sync_data_locker` (Boolean) Field usage depends on `service` value: - - Service `appsflyer`: Sync AppsFlyer Data Locker. Default value is `true`; set it to `false` to sync AppsFlyer data using only webhooks. -- `sync_format` (String) Field usage depends on `service` value: - - Service `webhooks`: The webhooks sync format. Default value: `Unpacked`. Unpacked messages must be valid JSON. -- `sync_formula_fields` (Boolean) Field usage depends on `service` value: - - Service `salesforce`: (Optional) Enables syncing of formula fields. The value must be `true` or `false`. - - Service `salesforce_sandbox`: (Optional) Sync formula fields (default value = `false`). -- `sync_metadata` (Boolean) Field usage depends on `service` value: - - Service `facebook_ads`: Parameter defining whether to enable or disable metadata synchronization. Default value: `TRUE`. -- `sync_method` (String) Field usage depends on `service` value: - - Service `aws_lambda`: Sync Method -- `sync_mode` (String) Field usage depends on `service` value: - - Service `adroll`: Whether to sync all advertisables or specific advertisables. Default value: `AllAdvertisables`. - - Service `amazon_ads`: Option to select whether the connector should sync all profiles or specific profiles. - - Service `anaplan`: Whether to sync all exports or specific exports. - - Service `apple_search_ads`: Sync Mode - - Service `asana`: Whether to sync all projects or specific projects. - - Service `bingads`: Whether to sync all accounts or specific accounts. Default value: `AllAccounts`. - - Service `double_click_campaign_manager`: Whether to sync all user profiles or specific ones. Default value: `AllAccounts`. - - Service `dynamodb`: Whether to sync all tables in unpacked mode only or specific tables in packed mode. Default value: `UseUnpackedModeOnly`. - - Service `facebook`: Option to select whether the connector should sync all accounts or specific accounts. [Possible sync_mode values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#syncmode). - - Service `facebook_ad_account`: Whether to sync all accounts or specific accounts. Default value: `AllAccounts`. - - Service `facebook_ads`: Option to select whether the connector should sync all accounts or specific accounts. [Possible sync_mode values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#syncmode). - - Service `facebook_pages`: Whether to sync all accounts or specific accounts. Default value: `AllPages`. - - Service `github`: Whether to sync all repositories or specific repositories. - - Service `google_ads`: Whether to sync all accounts or specific accounts. - - Service `google_analytics`: Whether to sync all accounts or specific accounts. - - Service `google_analytics_4`: Whether to sync all accounts or specific accounts. - - Service `google_analytics_mcf`: Whether to sync all accounts or specific accounts. Default value: `ALL_ACCOUNTS` - - Service `google_search_console`: Whether to sync all sites or specific sites. - - Service `instagram_business`: Whether to sync all accounts or specific accounts. Default value: `AllAccounts`. - - Service `iterable`: Events Sync Mode. - - Service `jira`: Whether to sync all projects or specific projects. - - Service `linkedin_ads`: Whether to sync all accounts or specific accounts.
Default value: `AllAccounts`. - - Service `pendo`: Whether to sync all App IDs or specific App IDs. Default value: `AllAppIds`. - - Service `pinterest_ads`: Whether to sync all advertisers or specific advertisers. - - Service `reddit_ads`: Whether to sync all accounts or specific accounts. - - Service `salesforce_marketing_cloud`: Select the sync mode. - - Service `snapchat_ads`: Whether to sync all organizations or specific organizations. Default value: `AllOrganizations`. - - Service `spotify_ads`: Whether to sync all ad accounts or specific ad accounts. - - Service `taboola`: Whether to sync all accounts or specific accounts. - - Service `the_trade_desk`: Whether to sync all accounts or specific accounts. - - Service `tiktok_ads`: Whether to sync all advertiser accounts or specific accounts. - - Service `twilio`: Whether to sync all accounts or specific accounts. - - Service `twitter`: Whether to sync all accounts or specific accounts. Default value: `AllAccounts`. - - Service `twitter_ads`: Whether to sync all accounts or specific accounts. - - Service `walmart_dsp`: Whether to sync all accounts or specific accounts. - - Service `yahoo_gemini`: Whether to sync all accounts or specific accounts. Default value: `SpecificAccounts`. - - Service `zuora`: Select `Zuora Billing` to sync exclusively Zuora Billing related records. Choose `Zuora Revenue` for syncing only Zuora Revenue reports. If both Zuora Billing records and Zuora Revenue reports are to be synced, opt for `Both`. - - Service `zuora_sandbox`: Select `Zuora Billing` to sync exclusively Zuora Billing related records. Choose `Zuora Revenue` for syncing only Zuora Revenue reports. If both Zuora Billing records and Zuora Revenue reports are to be synced, opt for `Both`. -- `sync_mode_advertiser` (String) Field usage depends on `service` value: - - Service `yahoo_dsp`: Whether to sync all advertisers or specific advertisers. Default value: `ALL_ADVERTISERS`. -- `sync_mode_seat` (String) Field usage depends on `service` value: - - Service `yahoo_dsp`: Whether to sync all seats or specific seats. Default value: `ALL_SEATS`. -- `sync_multiple_accounts` (Boolean) Field usage depends on `service` value: - - Service `reddit_ads`: When this parameter is set to `true`, we sync the data of the additional linked accounts. When this parameter is set to `false`, we sync only the data from the main account that was used for authorization. -- `sync_pack_mode` (String) Field usage depends on `service` value: - - Service `cosmos`: The packing mode type. Supported values: `STANDARD_UNPACKED_MODE` (unpacks _one_ layer of nested fields and infers types) and `PACKED_MODE` (delivers packed data as a single destination column value). Learn more in our [Azure Cosmos DB Sync Pack Mode Options documentation](https://fivetran.com/docs/connectors/databases/cosmos#packmodeoptions). - - Service `documentdb`: Indicates whether synced data will be packed into a single entry (column), or unpacked with one layer of nested fields. -- `sync_pull_api` (Boolean) Field usage depends on `service` value: - - Service `appsflyer`: These options are for Appsflyer's Pull API, and are only necessary for syncing events from the Pull API. -- `sync_type` (String) Field usage depends on `service` value: - - Service `apache_kafka`: Kafka sync type. Unpacked messages must be valid JSON. - - Service `aws_msk`: The sync type. Unpacked messages must be valid JSON. - - Service `azure_event_hub`: Sync type. Unpacked messages must be valid JSON.
- - Service `azure_service_bus`: The sync type, which is based on the message type. For `text` and `xml`, `packed` is supported. For `protobuf` and `avro`, `unpacked` is supported. For `json`, both `packed` and `unpacked` are supported - - Service `confluent_cloud`: Kafka sync type. Unpacked messages must be valid JSON. - - Service `heroku_kafka`: Heroku Kafka sync type. Unpacked messages must be valid JSON. - - Service `segment`: The Segment connector sync type. -- `sysnr` (String) -- `system_id` (String) Field usage depends on `service` value: - - Service `hana_sap_hva_ecc_netweaver`: Unique identifier sapsid of the SAP system. This field is displayed only when the REMOTE SERVICE IDENTIFICATION is set to System ID. - - Service `hana_sap_hva_s4_netweaver`: Unique identifier sapsid of the SAP system. This field is displayed only when the REMOTE SERVICE IDENTIFICATION is set to System ID. -- `table_name` (String) Field usage depends on `service` value: - - Service `airtable`: Name of table in Airtable -- `target_entity_id` (String) Field usage depends on `service` value: - - Service `culture_amp`: Your Culture Amp Target entity ID. -- `target_host` (String) Field usage depends on `service` value: - - Service `d2l_brightspace`: Your D2L Brightspace target host. -- `tde_certificate` (String, Sensitive) Field usage depends on `service` value: - - Service `sql_server_hva`: Certificate used to protect a database encryption key - - Service `sql_server_sap_ecc_hva`: Certificate used to protect a database encryption key -- `tde_certificate_name` (String) Field usage depends on `service` value: - - Service `sql_server_hva`: Name of the Certificate used to protect a database encryption key - - Service `sql_server_sap_ecc_hva`: Name of the Certificate used to protect a database encryption key -- `tde_password` (String, Sensitive) Field usage depends on `service` value: - - Service `sql_server_hva`: Password of the TDE private key - - Service `sql_server_sap_ecc_hva`: Password of the TDE private key -- `tde_private_key` (String, Sensitive) Field usage depends on `service` value: - - Service `sql_server_hva`: Private key associated with the TDE certificate - - Service `sql_server_sap_ecc_hva`: Private key associated with the TDE certificate -- `team_id` (String) Field usage depends on `service` value: - - Service `asana`: Team ID -- `technical_account_id` (String) Field usage depends on `service` value: - - Service `adobe_analytics`: Technical Account ID from the Service Account (JWT) credentials of your Adobe Project. -- `template_labels` (Set of String) Field usage depends on `service` value: - - Service `mandrill`: Provide the labels to filter the templates -- `tenant` (String) Field usage depends on `service` value: - - Service `microsoft_entra_id`: Your Microsoft Entra ID Tenant. - - Service `microsoft_teams`: Your Microsoft Teams Tenant. - - Service `unicommerce`: Your uniware tenant. - - Service `workday`: Workday tenant name - - Service `workday_financial_management`: Workday tenant name - - Service `workday_hcm`: Workday tenant name -- `tenant_app_url` (String) Field usage depends on `service` value: - - Service `planful`: Your Planful tenant app URL. -- `tenant_configs` (Block Set) (see [below for nested schema](#nestedblock--config--tenant_configs)) -- `tenant_id` (String, Sensitive) Field usage depends on `service` value: - - Service `azure_sql_db`: Azure AD tenant ID. - - Service `azure_sql_managed_db`: Azure AD tenant ID. 
- - Service `business_central`: `Tenant ID` of your Business Central application. - - Service `crowddev`: Your crowd.dev Tenant ID. - - Service `reltio`: Your Reltio tenant ID. - - Service `servicetitan`: Your ServiceTitan tenant ID. - - Service `visma`: Your Visma tenant ID. -- `tenant_name` (String) Field usage depends on `service` value: - - Service `mambu`: Your Mambu tenant name. -- `tenant_url` (String) Field usage depends on `service` value: - - Service `ivanti`: Your Ivanti Tenant URL. - - Service `playvox_workforce_management`: Your Playvox Workforce Management Tenant URL. - - Service `reltio`: Your Reltio tenant URL. -- `test_table_name` (String) Field usage depends on `service` value: - - Service `sap_hana`: testTableName - - Service `sap_s4hana`: testTableName -- `time_zone` (String) Field usage depends on `service` value: - - Service `pardot`: The time zone configured in your Pardot instance. An empty value defaults to `UTC+00:00`. -- `timeframe_months` (String) Field usage depends on `service` value: - - Service `adobe_analytics`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector has been created. Default value: `TWELVE`. - - Service `adroll`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `TWELVE`. - - Service `apple_search_ads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connection is created. NOTE: The more months of reporting data you sync, the longer your initial sync will take. - - Service `bingads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `SIX`. - - Service `criteo`: The number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. NOTE: The more months of reporting data you sync, the longer your initial sync will take. - - Service `double_click_campaign_manager`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync. Default value: `TWELVE`. - - Service `double_click_publishers`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync. - - Service `facebook`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`. - - Service `facebook_ads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`. - - Service `google_ads`: The number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled custom and prebuilt reports during the next connector sync. Default value: `TWELVE`. - - Service `google_analytics`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync. Default value: `TWELVE`.
- - Service `google_analytics_4`: The number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync. Default value: `TWELVE`. - - Service `google_analytics_mcf`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync. Default value: `TWELVE`. - - Service `google_display_and_video_360`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync. NOTE: The more months of reporting data you sync, the longer your initial sync will take. - - Service `google_search_ads_360`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync. - - Service `google_search_console`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync. - - Service `instagram_business`: Number of months' worth of data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `SIX`. - - Service `itunes_connect`: Historical sync time frame in months. - - Service `linkedin_ads`: Number of months for which to query reporting data included in the initial sync. This number cannot be modified once the connector is created. Default value: `ALL_TIME`. - - Service `outbrain`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connection is created. NOTE: The more months of reporting data you sync, the longer your initial sync will take. - - Service `pinterest_ads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`. - - Service `reddit_ads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `TWELVE`. - - Service `snapchat_ads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `TWELVE`. - - Service `spotify_ads`: The number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `SIX`. - - Service `taboola`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connection is created. NOTE: The more months of reporting data you sync, the longer your initial sync will take. - - Service `the_trade_desk`: Number of months' worth of data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`. - - Service `tiktok_ads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`. - - Service `twitter`: Number of months' worth of data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`.
- - Service `twitter_ads`: Historical sync timeframe in months. - - Service `walmart_dsp`: Number of months' worth of data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`. - - Service `yahoo_dsp`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`. - - Service `yahoo_gemini`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `TWELVE`. -- `tns` (String) Field usage depends on `service` value: - - Service `oracle_hva`: Single-tenant database: The database's SID. Multi-tenant database: The database's TNS. - - Service `oracle_sap_hva`: Single-tenant database: The database SID. Multi-tenant database: The database TNS. -- `toast_id` (String) Field usage depends on `service` value: - - Service `toast`: Your Toast Restaurant External ID. -- `token` (String, Sensitive) Field usage depends on `service` value: - - Service `mode`: Your Mode Token. - - Service `oracle_moat_analytics`: Your Oracle Moat Analytics Token. - - Service `solarwinds_service_desk`: Your SolarWinds Service Desk token. -- `token_authenticated_container` (String) Field usage depends on `service` value: - - Service `cosmos`: The container name. Required for the `RESOURCE_TOKEN` data access method. -- `token_authenticated_database` (String) Field usage depends on `service` value: - - Service `cosmos`: The database name. Required for the `RESOURCE_TOKEN` data access method. -- `token_id` (String, Sensitive) Field usage depends on `service` value: - - Service `chargedesk`: Your ChargeDesk token ID. - - Service `mux`: Your Mux token ID. -- `token_key` (String, Sensitive) Field usage depends on `service` value: - - Service `netsuite_suiteanalytics`: Token ID - - Service `on24`: Your ON24 token key. - - Service `proofpoint_security_awareness`: Your Proofpoint Security Awareness Token Key. -- `token_secret` (String, Sensitive) Field usage depends on `service` value: - - Service `netsuite_suiteanalytics`: Token Secret - - Service `on24`: Your ON24 token secret. -- `token_secret_key` (String, Sensitive) Field usage depends on `service` value: - - Service `mux`: Your Mux token secret key. -- `topics` (Set of String) Field usage depends on `service` value: - - Service `azure_service_bus`: The comma-separated list of topics which should be synced. Required if you do not have manage permissions. -- `trust_store_type` (String) Field usage depends on `service` value: - - Service `heroku_kafka`: Trust Store Type -- `trusted_cert` (String, Sensitive) Field usage depends on `service` value: - - Service `apache_kafka`: Kafka trusted certificate. - - Service `heroku_kafka`: Heroku Kafka trusted certificate. Required for `TLS` security protocol. -- `truststore` (String, Sensitive) Field usage depends on `service` value: - - Service `aws_msk`: If `security_protocol` is set to `TLS`, add the `Truststore File` as a Base64-encoded string. -- `tunnel_host` (String) Field usage depends on `service` value: - - Service `aurora`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `aurora_postgres`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `azure_cosmos_for_mongo`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer).
- - Service `azure_postgres`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `azure_sql_db`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `azure_sql_managed_db`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `clarity`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `cockroachdb`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `db2i_hva`: SSH host, only specify when connecting via an SSH tunnel (do not use a load balancer). Required for connector creation. - - Service `db2i_sap_hva`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `documentdb`: SSH host, only specify when connecting via an SSH tunnel (do not use a load balancer). Required for connector creation. - - Service `dynamics_365_fo`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `ehr`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `elastic_cloud`: SSH host, specify only to connect using an SSH tunnel (do not use a load balancer). - - Service `es_self_hosted`: SSH host, specify only to connect using an SSH tunnel (do not use a load balancer). - - Service `google_cloud_mysql`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `google_cloud_postgresql`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `google_cloud_sqlserver`: SSH host, only specify when connecting via an SSH tunnel (do not use a load balancer). - - Service `hana_sap_hva_b1`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `hana_sap_hva_ecc`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `hana_sap_hva_ecc_netweaver`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `hana_sap_hva_s4`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `hana_sap_hva_s4_netweaver`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `heroku_postgres`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `magento_mysql`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `magento_mysql_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `maria`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `maria_azure`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `maria_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `mongo`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `mongo_sharded`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `mysql`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `mysql_azure`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `mysql_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). 
- - Service `opendistro`: SSH host, specify only to connect using an SSH tunnel (do not use a load balancer). - - Service `opensearch`: SSH host, specify only to connect using an SSH tunnel (do not use a load balancer). - - Service `oracle`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `oracle_ebs`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `oracle_hva`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `oracle_rac`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `oracle_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `oracle_sap_hva`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `oracle_sap_hva_netweaver`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `postgres`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `postgres_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `sap_hana`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `sap_hana_db`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `sap_s4hana`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `sftp`: Tunnel host address, specify only to connect via SSH tunnel. - - Service `sql_server`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `sql_server_hva`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `sql_server_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). - - Service `sql_server_sap_ecc_hva`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). -- `tunnel_port` (Number) Field usage depends on `service` value: - - Service `aurora`: SSH port, specify only to connect via an SSH tunnel. - - Service `aurora_postgres`: SSH port, specify only to connect via an SSH tunnel. - - Service `azure_cosmos_for_mongo`: SSH port, specify only to connect via an SSH tunnel. - - Service `azure_postgres`: SSH port, specify only to connect via an SSH tunnel. - - Service `azure_sql_db`: SSH port, specify only to connect via an SSH tunnel. - - Service `azure_sql_managed_db`: SSH port, specify only to connect via an SSH tunnel. - - Service `clarity`: SSH port, specify only to connect via an SSH tunnel. - - Service `cockroachdb`: SSH port, specify only to connect via an SSH tunnel. - - Service `db2i_hva`: SSH port, only specify when connecting via an SSH tunnel. Required for connector creation. - - Service `db2i_sap_hva`: SSH port, specify only to connect via an SSH tunnel. - - Service `documentdb`: SSH port, only specify when connecting via an SSH tunnel. Required for connector creation. - - Service `dynamics_365_fo`: SSH port, specify only to connect via an SSH tunnel. - - Service `ehr`: SSH port, specify only to connect via an SSH tunnel. - - Service `elastic_cloud`: SSH port, specify only to connect using an SSH tunnel. - - Service `es_self_hosted`: SSH port, specify only to connect using an SSH tunnel. - - Service `google_cloud_mysql`: SSH port, specify only to connect via an SSH tunnel. 
- - Service `google_cloud_postgresql`: SSH port, specify only to connect via an SSH tunnel. - - Service `google_cloud_sqlserver`: SSH port, only specify when connecting via an SSH tunnel. - - Service `hana_sap_hva_b1`: SSH port, specify only to connect via an SSH tunnel. - - Service `hana_sap_hva_ecc`: SSH port, specify only to connect via an SSH tunnel. - - Service `hana_sap_hva_ecc_netweaver`: SSH port, specify only to connect via an SSH tunnel. - - Service `hana_sap_hva_s4`: SSH port, specify only to connect via an SSH tunnel. - - Service `hana_sap_hva_s4_netweaver`: SSH port, specify only to connect via an SSH tunnel. - - Service `heroku_postgres`: SSH port, specify only to connect via an SSH tunnel. - - Service `magento_mysql`: SSH port, specify only to connect via an SSH tunnel. - - Service `magento_mysql_rds`: SSH port, specify only to connect via an SSH tunnel. - - Service `maria`: SSH port, specify only to connect via an SSH tunnel. - - Service `maria_azure`: SSH port, specify only to connect via an SSH tunnel. - - Service `maria_rds`: SSH port, specify only to connect via an SSH tunnel. - - Service `mongo`: SSH port, specify only to connect via an SSH tunnel. - - Service `mongo_sharded`: SSH port, specify only to connect via an SSH tunnel. - - Service `mysql`: SSH port, specify only to connect via an SSH tunnel. - - Service `mysql_azure`: SSH port, specify only to connect via an SSH tunnel. - - Service `mysql_rds`: SSH port, specify only to connect via an SSH tunnel. - - Service `opendistro`: SSH port, specify only to connect using an SSH tunnel. - - Service `opensearch`: SSH port, specify only to connect using an SSH tunnel. - - Service `oracle`: SSH port, specify only to connect via an SSH tunnel. - - Service `oracle_ebs`: SSH port, specify only to connect via an SSH tunnel. - - Service `oracle_hva`: SSH port, specify only to connect via an SSH tunnel. - - Service `oracle_rac`: SSH port, specify only to connect via an SSH tunnel. - - Service `oracle_rds`: SSH port, specify only to connect via an SSH tunnel. - - Service `oracle_sap_hva`: SSH port, specify only to connect via an SSH tunnel. - - Service `oracle_sap_hva_netweaver`: SSH port, specify only to connect via an SSH tunnel. - - Service `postgres`: SSH port, specify only to connect via an SSH tunnel. - - Service `postgres_rds`: SSH port, specify only to connect via an SSH tunnel. - - Service `sap_hana`: SSH port, specify only to connect via an SSH tunnel. - - Service `sap_hana_db`: SSH port, specify only to connect via an SSH tunnel. - - Service `sap_s4hana`: SSH port, specify only to connect via an SSH tunnel. - - Service `sftp`: Tunnel port, specify only to connect via SSH tunnel. - - Service `sql_server`: SSH port, specify only to connect via an SSH tunnel. - - Service `sql_server_hva`: SSH port, specify only to connect via an SSH tunnel. - - Service `sql_server_rds`: SSH port, specify only to connect via an SSH tunnel. - - Service `sql_server_sap_ecc_hva`: SSH port, specify only to connect via an SSH tunnel. -- `tunnel_user` (String) Field usage depends on `service` value: - - Service `aurora`: SSH user, specify only to connect via an SSH tunnel. - - Service `aurora_postgres`: SSH user, specify only to connect via an SSH tunnel. - - Service `azure_cosmos_for_mongo`: SSH user, specify only to connect via an SSH tunnel. - - Service `azure_postgres`: SSH user, specify only to connect via an SSH tunnel. - - Service `azure_sql_db`: SSH user, specify only to connect via an SSH tunnel. 
- - Service `azure_sql_managed_db`: SSH user, specify only to connect via an SSH tunnel. - - Service `clarity`: SSH user, specify only to connect via an SSH tunnel. - - Service `cockroachdb`: SSH user, specify only to connect via an SSH tunnel. - - Service `db2i_hva`: SSH user, specify only to connect via an SSH tunnel. Required for connector creation. - - Service `db2i_sap_hva`: SSH user, specify only to connect via an SSH tunnel. - - Service `documentdb`: SSH user, specify only to connect via an SSH tunnel. Required for connector creation. - - Service `dynamics_365_fo`: SSH user, specify only to connect via an SSH tunnel. - - Service `ehr`: SSH user, specify only to connect via an SSH tunnel. - - Service `elastic_cloud`: SSH user, specify only to connect using an SSH tunnel. - - Service `es_self_hosted`: SSH user, specify only to connect using an SSH tunnel. - - Service `google_cloud_mysql`: SSH user, specify only to connect via an SSH tunnel. - - Service `google_cloud_postgresql`: SSH user, specify only to connect via an SSH tunnel. - - Service `google_cloud_sqlserver`: SSH user, only specify when connecting via an SSH tunnel. - - Service `hana_sap_hva_b1`: SSH user, specify only to connect via an SSH tunnel. - - Service `hana_sap_hva_ecc`: SSH user, specify only to connect via an SSH tunnel. - - Service `hana_sap_hva_ecc_netweaver`: SSH user, specify only to connect via an SSH tunnel. - - Service `hana_sap_hva_s4`: SSH user, specify only to connect via an SSH tunnel. - - Service `hana_sap_hva_s4_netweaver`: SSH user, specify only to connect via an SSH tunnel. - - Service `heroku_postgres`: SSH user, specify only to connect via an SSH tunnel. - - Service `magento_mysql`: SSH user, specify only to connect via an SSH tunnel. - - Service `magento_mysql_rds`: SSH user, specify only to connect via an SSH tunnel. - - Service `maria`: SSH user, specify only to connect via an SSH tunnel. - - Service `maria_azure`: SSH user, specify only to connect via an SSH tunnel. - - Service `maria_rds`: SSH user, specify only to connect via an SSH tunnel. - - Service `mongo`: SSH user, specify only to connect via an SSH tunnel. - - Service `mongo_sharded`: SSH user, specify only to connect via an SSH tunnel. - - Service `mysql`: SSH user, specify only to connect via an SSH tunnel. - - Service `mysql_azure`: SSH user, specify only to connect via an SSH tunnel. - - Service `mysql_rds`: SSH user, specify only to connect via an SSH tunnel. - - Service `opendistro`: SSH user, specify only to connect using an SSH tunnel. - - Service `opensearch`: SSH user, specify only to connect using an SSH tunnel. - - Service `oracle`: SSH user, specify only to connect via an SSH tunnel. - - Service `oracle_ebs`: SSH user, specify only to connect via an SSH tunnel. - - Service `oracle_hva`: SSH user, specify only to connect via an SSH tunnel. - - Service `oracle_rac`: SSH user, specify only to connect via an SSH tunnel. - - Service `oracle_rds`: SSH user, specify only to connect via an SSH tunnel. - - Service `oracle_sap_hva`: SSH user, specify only to connect via an SSH tunnel. - - Service `oracle_sap_hva_netweaver`: SSH user, specify only to connect via an SSH tunnel. - - Service `postgres`: SSH user, specify only to connect via an SSH tunnel. - - Service `postgres_rds`: SSH user, specify only to connect via an SSH tunnel. - - Service `sap_hana`: SSH user, specify only to connect via an SSH tunnel. - - Service `sap_hana_db`: SSH user, specify only to connect via an SSH tunnel. 
- - Service `sap_s4hana`: SSH user, specify only to connect via an SSH tunnel. - - Service `sftp`: Tunnel user, specify only to connect via SSH tunnel. - - Service `sql_server`: SSH user, specify only to connect via an SSH tunnel. - - Service `sql_server_hva`: SSH user, specify only to connect via an SSH tunnel. - - Service `sql_server_rds`: SSH user, specify only to connect via an SSH tunnel. - - Service `sql_server_sap_ecc_hva`: SSH user, specify only to connect via an SSH tunnel. -- `type_name` (String) Field usage depends on `service` value: - - Service `akamai`: Your Akamai type name. - - Service `bubble`: Your Bubble type name. -- `unique_id` (String) -- `update_config_on_each_sync` (Boolean) Field usage depends on `service` value: - - Service `google_display_and_video_360`: Specifies whether the configuration is updated before each sync or only when the connector settings are saved. This parameter only takes effect when `config_method` is set to `REUSE_EXISTING`. The default value is `true`. -- `update_method` (String) Field usage depends on `service` value: - - Service `aurora`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `aurora_postgres`: The method to detect new or changed rows. Specify only for `"service": "postgres"` or `"service": "postgres_rds"`. Supported values:`WAL` - this method replicates new, changed and deleted rows by tailing the write-ahead log (WAL) via a logical slot. This is more efficient than the XMIN method, but requires more setup and monitoring.`XMIN` - this method detects new or changed rows via the XMIN system column, but is not capable of detecting deleted rows.`WAL_PGOUTPUT` - logical replication of the WAL using the pgoutput plugin. This method replicates new, changed, and deleted rows by tailing the write-ahead log (WAL) using a logical slot.`TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `azure_postgres`: The method to detect new or changed rows. Specify only for `"service": "postgres"` or `"service": "postgres_rds"`. Supported values:`WAL` - this method replicates new, changed and deleted rows by tailing the write-ahead log (WAL) via a logical slot. This is more efficient than the XMIN method, but requires more setup and monitoring.`XMIN` - this method detects new or changed rows via the XMIN system column, but is not capable of detecting deleted rows.`WAL_PGOUTPUT` - logical replication of the WAL using the pgoutput plugin. This method replicates new, changed, and deleted rows by tailing the write-ahead log (WAL) using a logical slot.`TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `azure_sql_db`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. - - Service `azure_sql_managed_db`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. 
- - Service `clarity`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. - - Service `dynamics_365_fo`: Update Method - - Service `ehr`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. - - Service `google_cloud_mysql`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `google_cloud_postgresql`: The method to detect new or changed rows. Specify only for `"service": "postgres"` or `"service": "postgres_rds"`. Supported values:`WAL` - this method replicates new, changed and deleted rows by tailing the write-ahead log (WAL) via a logical slot. This is more efficient than the XMIN method, but requires more setup and monitoring.`XMIN` - this method detects new or changed rows via the XMIN system column, but is not capable of detecting deleted rows.`WAL_PGOUTPUT` - logical replication of the WAL using the pgoutput plugin. This method replicates new, changed, and deleted rows by tailing the write-ahead log (WAL) using a logical slot.`TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `google_cloud_sqlserver`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. - - Service `heroku_postgres`: The method to detect new or changed rows. Specify only for `"service": "postgres"` or `"service": "postgres_rds"`. Supported values:`WAL` - this method replicates new, changed and deleted rows by tailing the write-ahead log (WAL) via a logical slot. This is more efficient than the XMIN method, but requires more setup and monitoring.`XMIN` - this method detects new or changed rows via the XMIN system column, but is not capable of detecting deleted rows.`WAL_PGOUTPUT` - logical replication of the WAL using the pgoutput plugin. This method replicates new, changed, and deleted rows by tailing the write-ahead log (WAL) using a logical slot.`TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `magento_mysql`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `magento_mysql_rds`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. 
- - Service `maria`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `maria_azure`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `maria_rds`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `mysql`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `mysql_azure`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `mysql_rds`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `oracle`: The method used to detect new or changed rows. Supported values: - `LOGMINER` - Fivetran uses LogMiner, a utility that is part of Oracle Database, to detect modified rows in the source tables. - `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `oracle_ebs`: The method used to detect new or changed rows. Supported values: - `LOGMINER` - Fivetran uses LogMiner, a utility that is part of Oracle Database, to detect modified rows in the source tables. - `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `oracle_hva`: The method used to detect new or changed rows. Supported values: - `LOGMINER` - Fivetran uses LogMiner, a utility that is part of Oracle Database, to detect modified rows in the source tables. - `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `oracle_rac`: The method used to detect new or changed rows. Supported values: - `LOGMINER` - Fivetran uses LogMiner, a utility that is part of Oracle Database, to detect modified rows in the source tables. 
- `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `oracle_rds`: The method used to detect new or changed rows. Supported values: - `LOGMINER` - Fivetran uses LogMiner, a utility that is part of Oracle Database, to detect modified rows in the source tables. - `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `oracle_sap_hva`: The method used to detect new or changed rows. Supported values: - `LOGMINER` - Fivetran uses LogMiner, a utility that is part of Oracle Database, to detect modified rows in the source tables. - `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `postgres`: The method to detect new or changed rows. Specify only for `"service": "postgres"` or `"service": "postgres_rds"`. Supported values:`WAL` - this method replicates new, changed and deleted rows by tailing the write-ahead log (WAL) via a logical slot. This is more efficient than the XMIN method, but requires more setup and monitoring.`XMIN` - this method detects new or changed rows via the XMIN system column, but is not capable of detecting deleted rows.`WAL_PGOUTPUT` - logical replication of the WAL using the pgoutput plugin. This method replicates new, changed, and deleted rows by tailing the write-ahead log (WAL) using a logical slot.`TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `postgres_rds`: The method to detect new or changed rows. Specify only for `"service": "postgres"` or `"service": "postgres_rds"`. Supported values:`WAL` - this method replicates new, changed and deleted rows by tailing the write-ahead log (WAL) via a logical slot. This is more efficient than the XMIN method, but requires more setup and monitoring.`XMIN` - this method detects new or changed rows via the XMIN system column, but is not capable of detecting deleted rows.`WAL_PGOUTPUT` - logical replication of the WAL using the pgoutput plugin. This method replicates new, changed, and deleted rows by tailing the write-ahead log (WAL) using a logical slot.`TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. - - Service `redshift_db`: Default value: `Teleport` - - Service `snowflake_db`: Default value: `Teleport` - - Service `sql_server`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. - - Service `sql_server_hva`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. - - Service `sql_server_rds`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. - - Service `sql_server_sap_ecc_hva`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. -- `uri` (String) Field usage depends on `service` value: - - Service `cosmos`: Cosmos resource instance address. 
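-For illustration, the `update_method` and SSH tunnel attributes (`tunnel_host`, `tunnel_port`, `tunnel_user`) described above are set together inside the connector's `config` block. The following is a minimal sketch for a PostgreSQL source synced through a bastion host; the host names and credentials are placeholders, and the remaining attributes (`host`, `port`, `user`, `password`, `database`) are standard connection settings shown only for context:
-
-```hcl
-resource "fivetran_connector" "postgres_via_ssh" {
-  group_id = "group_id"
-  service  = "postgres"
-
-  destination_schema {
-    # Database connectors typically use a schema prefix
-    prefix = "postgres_prod"
-  }
-
-  config {
-    host     = "db.internal.example.com" # placeholder
-    port     = 5432
-    user     = "fivetran_reader" # placeholder
-    password = "password"        # placeholder
-    database = "production"      # placeholder
-
-    # Detect changes by tailing the write-ahead log (see `update_method` above)
-    update_method = "WAL"
-
-    # Route the connection through an SSH bastion instead of a load balancer
-    tunnel_host = "bastion.example.com" # placeholder
-    tunnel_port = 22
-    tunnel_user = "fivetran"
-  }
-}
-```
-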
-- `url_format` (String) Field usage depends on `service` value: - - Service `fountain`: Your Fountain URL format. -- `use_api_keys` (Boolean) Field usage depends on `service` value: - - Service `mandrill`: Whether to use multiple API keys for interaction. -- `use_customer_bucket` (Boolean) Field usage depends on `service` value: - - Service `appsflyer`: Use a custom bucket. Set to `true` if the data is being synced to your S3 bucket instead of an AppsFlyer-managed bucket. -- `use_oracle_rac` (Boolean) Field usage depends on `service` value: - - Service `oracle_hva`: Default value: `false`. Set to `true` if you're using a RAC instance. - - Service `oracle_sap_hva`: Default value: `false`. Set to `true` if you're using a RAC instance. -- `use_pgp_encryption_options` (Boolean) Field usage depends on `service` value: - - Service `azure_blob_storage`: Set to `true` if files present in the Azure Blob Storage container are encrypted using PGP. Default value: `false`. - - Service `ftp`: Set to `true` if files are encrypted using PGP in the S3 bucket. Default value: `false`. - - Service `gcs`: Set to `true` if files are encrypted using PGP in the GCS bucket. Default value: `false`. - - Service `s3`: Set to `true` if files are encrypted using PGP in the S3 bucket. Default value: `false`. - - Service `sftp`: Set to `true` if files present on the SFTP server are encrypted using PGP. Default value: `false`. - - Service `wasabi_cloud_storage`: Set to `true` if files are encrypted using PGP in the Wasabi Cloud Storage bucket. Default value: `false`. -- `use_service_account` (Boolean) Field usage depends on `service` value: - - Service `bigquery_db`: Use a BigQuery service account. Default value: `false`. -- `use_template_labels` (Boolean) Field usage depends on `service` value: - - Service `mandrill`: Use template labels to filter templates for sync. -- `use_webhooks` (Boolean) Field usage depends on `service` value: - - Service `github`: Set to `true` to capture deletes. - - Service `xero`: Updates to a few fields, such as `sent_to_contact` in the `Invoice` table, might be missed if you don't enable this. -- `use_workspace` (Boolean) Field usage depends on `service` value: - - Service `bigquery_db`: Create and drop tables in a query results dataset. Default value: `false`. - - Service `snowflake_db`: Choose a database and schema to create temporary tables for syncs. -- `user` (String) Field usage depends on `service` value: - - Service `aurora`: The user name. - - Service `aurora_postgres`: The user name. - - Service `azure_cosmos_for_mongo`: Username for source database access. - - Service `azure_postgres`: The user name. - - Service `azure_sql_db`: The user name. For Azure Databases, the format must be `user@domain`. - - Service `azure_sql_managed_db`: The user name. For Azure Databases, the format must be `user@domain`. - - Service `clarity`: The user name. For Azure Databases, the format must be `user@domain`. - - Service `cockroachdb`: The user name. - - Service `db2i_hva`: The user name. - - Service `db2i_sap_hva`: The username. - - Service `documentdb`: The user name. - - Service `dynamics_365_fo`: The user name. The format must be `user@domain`. - - Service `ehr`: The user name. For Azure Databases, the format must be `user@domain`. - - Service `elastic_cloud`: The user name. - - Service `es_self_hosted`: The user name. - - Service `ftp`: FTP user. - - Service `google_cloud_mysql`: The user name. - - Service `google_cloud_postgresql`: The user name. - - Service `google_cloud_sqlserver`: The user name.
For Azure Databases, the format must be `user@domain`. - - Service `hana_sap_hva_b1`: The username. - - Service `hana_sap_hva_ecc`: The username. - - Service `hana_sap_hva_ecc_netweaver`: The username. - - Service `hana_sap_hva_s4`: The username. - - Service `hana_sap_hva_s4_netweaver`: The username. - - Service `heroku_postgres`: The user name. - - Service `jira`: The Jira username. - - Service `magento_mysql`: The user name. - - Service `magento_mysql_rds`: The user name. - - Service `maria`: The user name. - - Service `maria_azure`: The user name. - - Service `maria_rds`: The user name. - - Service `marin`: The Marin username. - - Service `mongo`: The user name. - - Service `mongo_sharded`: The user name. - - Service `mysql`: The user name. - - Service `mysql_azure`: The user name. - - Service `mysql_rds`: The user name. - - Service `opendistro`: The user name. - - Service `opensearch`: The user name. - - Service `oracle`: The user name. - - Service `oracle_ebs`: The user name. - - Service `oracle_hva`: The user name. - - Service `oracle_rac`: The user name. - - Service `oracle_rds`: The user name. - - Service `oracle_sap_hva`: The username. - - Service `oracle_sap_hva_netweaver`: The username. - - Service `outbrain`: The username or email of the Outbrain user. - - Service `postgres`: The user name. - - Service `postgres_rds`: The user name. - - Service `redshift_db`: The Redshift username. - - Service `sap_hana`: Your SAP HANA user name. - - Service `sap_s4hana`: Your SAP S/4 user name. - - Service `sftp`: SFTP user. - - Service `snowflake_db`: The Snowflake username. - - Service `splunk`: The Splunk username. - - Service `sql_server`: The user name. For Azure Databases, the format must be `user@domain`. - - Service `sql_server_hva`: The user name. For Azure Databases, the format must be `user@domain`. - - Service `sql_server_rds`: The user name. For Azure Databases, the format must be `user@domain`. - - Service `sql_server_sap_ecc_hva`: The user name. For Azure Databases, the format must be `user@domain`. -- `user_id` (String) Field usage depends on `service` value: - - Service `coassemble`: Your Coassemble user ID. - - Service `gmail`: Your Gmail user ID. - - Service `hibob`: Your HiBob Service User Token. - - Service `marketo`: Marketo SOAP API User Id. - - Service `okendo`: Your Okendo user ID. - - Service `playvox`: Your Playvox User ID. - - Service `sage_intacct`: User ID -- `user_key` (String, Sensitive) -- `user_name` (String) Field usage depends on `service` value: - - Service `workday`: Workday username. -- `user_profiles` (Set of String) Field usage depends on `service` value: - - Service `double_click_campaign_manager`: IDs of specific User Profiles to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`. -- `user_token` (String, Sensitive) Field usage depends on `service` value: - - Service `hibob`: Your HiBob Service User ID. - - Service `konnect_insights`: Your Konnect Insights User Token. - - Service `sonarqube`: Your Sonarqube user token. -- `username` (String) Field usage depends on `service` value: - - Service `absorb_lms`: Your Absorb LMS username. - - Service `adobe_commerce`: Your Adobe Commerce username. - - Service `anaplan`: Your Anaplan user ID. Must be populated if `auth_mode` is set to `Basic`. - - Service `appfigures`: Your Appfigures Username. - - Service `ceridian_dayforce`: Your Ceridian Dayforce Username. - - Service `churnzero`: Your ChurnZero username. - - Service `cin7`: Your Cin7 API Username. 
- - Service `collibra`: Your Collibra username. - - Service `concur`: The SAP Concur username. - - Service `confluence`: Your Confluence username. - - Service `contrast_security`: Your Contrast Security API Username. - - Service `dcl_logistics`: Your DCL Logistics username. - - Service `github`: `Login` of your GitHub profile. - - Service `gladly`: Your Gladly Username. - - Service `globalmeet`: Your GlobalMeet Username. - - Service `gorgias`: Your Gorgias username. - - Service `green_power_monitor`: Your GreenPowerMonitor username. - - Service `guru`: Your Guru username. - - Service `impact`: Your Impact Account SID. - - Service `integral_ad_science`: Your Integral Ad Science username. - - Service `itunes_connect`: Your Apple ID. - - Service `jamf`: Your Jamf username. - - Service `khoros_care`: Your Khoros Care username. - - Service `kissmetrics`: Your Kissmetrics API Username. - - Service `klarna`: Your Klarna Username. - - Service `learnupon`: Your LearnUpon username. - - Service `lessonly`: Your Lessonly username. - - Service `mailgun`: Your Mailgun API username. - - Service `myosh`: Your myosh username. - - Service `oracle_business_intelligence_publisher`: The Oracle Business Intelligence username. - - Service `oracle_fusion_cloud_apps_crm`: The Oracle Fusion Cloud username. - - Service `oracle_fusion_cloud_apps_fscm`: The Oracle Fusion Cloud username. - - Service `oracle_fusion_cloud_apps_hcm`: The Oracle Fusion Cloud username. - - Service `partnerize`: Your Partnerize account's username. - - Service `pingdom`: Your Pingdom Username. - - Service `podio`: Your Podio username. - - Service `quorum`: Your Quorum username. - - Service `revx`: Your RevX Username. - - Service `rtb_house`: Your RTB House username. - - Service `sap_business_by_design`: The SAP Business ByDesign username. - - Service `scorm`: Your Scorm App ID. - - Service `servicenow`: Your ServiceNow User ID (username). - - Service `shiphero`: Your ShipHero username. - - Service `shipstation`: Your ShipStation username. - - Service `shopware`: Your Shopware username. - - Service `splash`: Your Splash username. - - Service `starrez`: Your StarRez API username. - - Service `stylight`: Your Stylight Username. - - Service `teamwork`: Your Teamwork username. - - Service `testrail`: Your TestRail username. - - Service `ukg_pro`: Your UKG Pro username. - - Service `unicommerce`: Your Uniware login username. - - Service `upland`: Your Upland Software Username. - - Service `veevavault`: Your Veeva Vault username. - - Service `when_i_work`: Your When I Work username. - - Service `wherefour`: Your Wherefour username. - - Service `workday_financial_management`: Workday username. - - Service `workday_hcm`: Username of your Workday Integration System User account. - - Service `xandr`: Your Xandr username. - - Service `younium`: Your Younium username. -- `version` (String) Field usage depends on `service` value: - - Service `criteo_retail_media`: Your Criteo Retail Media version. -- `view_attribution_window` (String) Field usage depends on `service` value: - - Service `facebook`: Time period to attribute conversions based on views. [Possible view_attribution_window values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#viewattributionwindow). - - Service `pinterest_ads`: The number of days to use as the conversion attribution window for a 'view' action. - - Service `snapchat_ads`: The time period to attribute conversions based on views.
Default value: `DAY_1` -- `view_through_attribution_window_size` (String) Field usage depends on `service` value: - - Service `linkedin_ads`: The time period to attribute conversions based on views. Default value: `DAY_7` -- `webhook_endpoint` (String) Field usage depends on `service` value: - - Service `appsflyer`: Webhook URL -- `webhook_key` (String) Field usage depends on `service` value: - - Service `xero`: Webhook Key -- `webhook_url` (String) Field usage depends on `service` value: - - Service `branch`: Webhook URL - - Service `pipedrive`: The registered URL for webhooks in your Pipedrive dashboard. - - Service `segment`: Webhook URL. - - Service `xero`: (ReadOnly) The Webhook URL generated by Fivetran. You can configure this in Xero. -- `word_press_site_id_or_woocommerce_domain_name` (String) Field usage depends on `service` value: - - Service `woocommerce`: The Site ID of your WordPress-hosted WooCommerce instance or the subdomain of your self-hosted WooCommerce instance. -- `workplace_id` (String) Field usage depends on `service` value: - - Service `moloco`: Your Moloco workplace ID. -- `workspace` (String) Field usage depends on `service` value: - - Service `mode`: Your Mode Workspace. -- `workspace_name` (String) Field usage depends on `service` value: - - Service `bigquery_db`: Workspace Dataset Name - - Service `snowflake_db`: The name of the database where the temporary tables will be created. -- `workspace_same_as_source` (Boolean) Field usage depends on `service` value: - - Service `bigquery_db`: Use the source dataset as the workspace dataset. -- `workspace_schema` (String) Field usage depends on `service` value: - - Service `snowflake_db`: The name of the schema that belongs to the workspace database where the temporary tables will be created. -- `ws_certificate` (String, Sensitive) Field usage depends on `service` value: - - Service `adp_workforce_now`: Web Services Certificate. -- `x_api_key` (String, Sensitive) Field usage depends on `service` value: - - Service `workday_strategic_sourcing`: Your Workday Strategic Sourcing X API key. -- `x_key` (String, Sensitive) Field usage depends on `service` value: - - Service `medallia_agile_research`: Your Medallia Agile Research key. -- `x_master_key` (String, Sensitive) Field usage depends on `service` value: - - Service `medallia_agile_research`: Your Medallia Agile Research master key. -- `x_user_email` (String) Field usage depends on `service` value: - - Service `workday_strategic_sourcing`: Your Workday Strategic Sourcing X User Email. -- `x_user_token` (String, Sensitive) Field usage depends on `service` value: - - Service `workday_strategic_sourcing`: Your Workday Strategic Sourcing X User Token. - -Read-Only: - -- `authorization_method` (String) -- `last_synced_changes__utc_` (String) -- `latest_version` (String) -- `service_version` (String) -- `subscriber_name` (String) Field usage depends on `service` value: - - Service `azure_service_bus`: The subscriber name. If the connection string does not have manage permission, you need to specify a subscriber name we can use to fetch data. If not specified, we default to `fivetran_sub_schema`. - - -### Nested Schema for `config.accounts_reddit_ads` - -Optional: - -- `name` (String) Field usage depends on `service` value: - - Service `reddit_ads`: Reddit username of the additional linked account.
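-Nested `config` sections such as `accounts_reddit_ads` above are expressed as repeated blocks inside the `config` block. A minimal sketch, assuming the account names are placeholders and that the provider accepts one block per linked account:
-
-```hcl
-resource "fivetran_connector" "reddit_ads" {
-  group_id = "group_id"
-  service  = "reddit_ads"
-
-  destination_schema {
-    name = "reddit_ads"
-  }
-
-  config {
-    # One block per additional linked Reddit account
-    accounts_reddit_ads {
-      name = "main_reddit_account" # placeholder username
-    }
-    accounts_reddit_ads {
-      name = "second_linked_account" # placeholder username
-    }
-  }
-}
-```
-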
- - - -### Nested Schema for `config.adobe_analytics_configurations` - -Optional: - -- `calculated_metrics` (Set of String) Field usage depends on `service` value: - - Service `adobe_analytics`: The calculated_metrics that you want to sync. -- `elements` (Set of String) Field usage depends on `service` value: - - Service `adobe_analytics`: The elements that you want to sync. -- `metrics` (Set of String) Field usage depends on `service` value: - - Service `adobe_analytics`: The metrics that you want to sync. -- `report_suites` (Set of String) Field usage depends on `service` value: - - Service `adobe_analytics`: Specific report suites to sync. Must be populated if `sync_mode` is set to `SpecificReportSuites`. -- `segments` (Set of String) Field usage depends on `service` value: - - Service `adobe_analytics`: The segments that you want to sync. -- `sync_mode` (String) Field usage depends on `service` value: - - Service `adobe_analytics`: Whether to sync all report suites or specific report suites. Default value: `AllReportSuites`. -- `table` (String) Field usage depends on `service` value: - - Service `adobe_analytics`: The table name unique within the schema to which connector will sync the data. Required for connector creation. - - - -### Nested Schema for `config.app_ids_appsflyer` - -Optional: - -- `app_id` (String) Field usage depends on `service` value: - - Service `appsflyer`: Your App ID. - - - -### Nested Schema for `config.custom_payloads` - -Optional: - -- `key` (String) Field usage depends on `service` value: - - Service `aws_lambda`: Payload Key - - Service `azure_function`: Payload Key - - Service `google_cloud_function`: Payload Key -- `value` (String) Field usage depends on `service` value: - - Service `aws_lambda`: Payload Value - - Service `azure_function`: Payload Value - - Service `google_cloud_function`: Payload Value - - - -### Nested Schema for `config.custom_reports` - -Optional: - -- `add_metric_variants` (Boolean) Field usage depends on `service` value: - - Service `snapchat_ads`: Add fields for separate "swipe-up" and "view" variants of selected metrics. -- `aggregate` (String) Field usage depends on `service` value: - - Service `tiktok_ads`: Time aggregation of the report. -- `base_metrics_fields` (Set of String) Field usage depends on `service` value: - - Service `snapchat_ads`: [List of Core, Additional and Conversion Metrics Stats Fields](https://fivetran.com/docs/connectors/applications/snapchat-ads/custom-reports#basemetricsfields). -- `breakdown` (String) Field usage depends on `service` value: - - Service `snapchat_ads`: [Sets Breakdown on custom report](https://fivetran.com/docs/connectors/applications/snapchat-ads/custom-reports#breakdown). -- `breakout` (String) Field usage depends on `service` value: - - Service `snapchat_ads`: [Sets Breakout on custom report](https://fivetran.com/docs/connectors/applications/snapchat-ads/custom-reports#breakout). -- `conversions_report_included` (Boolean) Field usage depends on `service` value: - - Service `reddit_ads`: The boolean value specifying whether to enable or disable event conversions data synchronisation. Default value: `false`. -- `custom_events_included` (Boolean) Field usage depends on `service` value: - - Service `reddit_ads`: The boolean value specifying whether the custom events are included in the event conversions report.
Default value: `false`. -- `dimension` (String) Field usage depends on `service` value: - - Service `snapchat_ads`: [Sets Dimension on custom report](https://fivetran.com/docs/connectors/applications/snapchat-ads/custom-reports#dimension). -- `dimensions` (Set of String) Field usage depends on `service` value: - - Service `tiktok_ads`: Dimensions to be synced. -- `event_names` (Set of String) Field usage depends on `service` value: - - Service `reddit_ads`: The list of events the conversion data will be synchronised for. -- `granularity` (String) Field usage depends on `service` value: - - Service `snapchat_ads`: [Sets Granularity on custom report](https://fivetran.com/docs/connectors/applications/snapchat-ads/custom-reports#granularity). -- `level` (String) Field usage depends on `service` value: - - Service `reddit_ads`: Level of custom report. -- `metrics` (Set of String) Field usage depends on `service` value: - - Service `tiktok_ads`: Metrics to be synced. -- `report_fields` (Set of String) Field usage depends on `service` value: - - Service `reddit_ads`: The list of fields included in the custom report. -- `report_name` (String) Field usage depends on `service` value: - - Service `reddit_ads`: The table name within the schema to which connector syncs the data of the specific report. - - Service `snapchat_ads`: Custom report name (must be unique). -- `report_type` (String) Field usage depends on `service` value: - - Service `tiktok_ads`: Type of report to be generated. -- `segmentation` (String) Field usage depends on `service` value: - - Service `reddit_ads`: Level of custom report. -- `sk_ad_metrics_fields` (Set of String) Field usage depends on `service` value: - - Service `snapchat_ads`: [List of SKAd Metrics fields in custom report](https://fivetran.com/docs/connectors/applications/snapchat-ads/custom-reports#skadmetricsfields). -- `table_name` (String) Field usage depends on `service` value: - - Service `tiktok_ads`: Destination table name of the report. -- `time_zone` (String) Field usage depends on `service` value: - - Service `reddit_ads`: The specific time zone to sync report data if `useAccountTimeZone` is set to `false`. -- `time_zone_mode` (String) Field usage depends on `service` value: - - Service `reddit_ads`: When this parameter is set to `ACCOUNT`, the connector will use the account-related time zone to sync report data. Default value: `ACCOUNT`. Possible values: `ACCOUNT`, `USER`. - - - -### Nested Schema for `config.custom_tables` - -Optional: - -- `action_breakdowns` (Set of String) Field usage depends on `service` value: - - Service `facebook_ads`: List of action_breakdowns which connector will sync. [Possible action_breakdowns values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#actionbreakdowns). -- `action_report_time` (String) Field usage depends on `service` value: - - Service `facebook_ads`: The report time of action stats. [Possible action_report time values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#actionreporttime). -- `aggregation` (String) Field usage depends on `service` value: - - Service `facebook_ads`: Options to select aggregation duration. [Possible aggregation values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#aggregation). -- `breakdowns` (Set of String) Field usage depends on `service` value: - - Service `facebook_ads`: List of breakdowns which connector will sync.
[Possible breakdowns values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#breakdowns). -- `click_attribution_window` (String) Field usage depends on `service` value: - - Service `facebook_ads`: Time period to attribute conversions based on clicks. [Possible click_attribution_window values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#clickattributionwindow). -- `config_type` (String) Field usage depends on `service` value: - - Service `facebook_ads`: Option to select Prebuilt Reports or Custom Reports. [Possible config_type values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#configtype). -- `engaged_view_attribution_window` (String) Field usage depends on `service` value: - - Service `facebook_ads`: Time period to attribute conversions based on engaged views. [Possible view_attribution_window values](https://fivetran.com/docs/connectors/applications/facebook-ads#engagedviewattributionwindow). -- `fields` (Set of String) Field usage depends on `service` value: - - Service `facebook_ads`: List of fields which connector will sync. [Possible field values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#fields). -- `level` (String) -- `prebuilt_report_name` (String) Field usage depends on `service` value: - - Service `facebook_ads`: The report name to which connector will sync the data. [Possible prebuilt_report values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#prebuiltreport). -- `table_name` (String) Field usage depends on `service` value: - - Service `facebook_ads`: The table name within the schema to which the connector will sync the data. It must be unique within the connector and must comply with [Fivetran's naming conventions](https://fivetran.com/docs/getting-started/core-concepts#namingconventions). -- `use_unified_attribution_setting` (Boolean) -- `view_attribution_window` (String) Field usage depends on `service` value: - - Service `facebook_ads`: Time period to attribute conversions based on views. [Possible view_attribution_window values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#viewattributionwindow). - - - -### Nested Schema for `config.project_credentials` - -Optional: - -- `api_key` (String, Sensitive) Field usage depends on `service` value: - - Service `amplitude`: The API key of the project. -- `project` (String) Field usage depends on `service` value: - - Service `amplitude`: The project name you wish to use with Fivetran. -- `secret_key` (String, Sensitive) Field usage depends on `service` value: - - Service `amplitude`: The secret key of the project. - - - -### Nested Schema for `config.report_configs` - -Optional: - -- `config_type` (String) Field usage depends on `service` value: - - Service `yahoo_dsp`: Set the value to `PREBUILT` if it's one of the preconfigured reports (see the `prebuilt_report_type` option). Otherwise, set to `CUSTOM`. -- `currency` (String) Field usage depends on `service` value: - - Service `yahoo_dsp`: [Currency](https://developer.yahooinc.com/dsp/api/docs/reporting/payloadspec.html) used in a report. Default value: `USD`. -- `dimensions` (Set of String) Field usage depends on `service` value: - - Service `yahoo_dsp`: [Dimensions](https://developer.yahooinc.com/dsp/api/docs/reporting/dimensions.html) used in a report. Must be populated if `config_type` is set to `CUSTOM`. 
-- `interval_type` (String) Field usage depends on `service` value: - - Service `yahoo_dsp`: The [granularity](https://developer.yahooinc.com/dsp/api/docs/reporting/range-examples.html#interval-type-id) of data in a report. Default value: `DAY`. -- `metrics` (Set of String) Field usage depends on `service` value: - - Service `yahoo_dsp`: [Metrics](https://developer.yahooinc.com/dsp/api/docs/reporting/metrics.html) used in a report. Must be populated if `config_type` is set to `CUSTOM`. -- `prebuilt_report_type` (String) Field usage depends on `service` value: - - Service `yahoo_dsp`: Specific report type to sync. Must be populated if `config_type` is set to `PREBUILT`. -- `report_name` (String) Field usage depends on `service` value: - - Service `yahoo_dsp`: Table name in destination. -- `time_zone` (String) Field usage depends on `service` value: - - Service `yahoo_dsp`: Specify the time zone to be used to request report data -- `use_advertiser_timezone` (Boolean) Field usage depends on `service` value: - - Service `yahoo_dsp`: Use advertiser timezone to request report data. - - - -### Nested Schema for `config.report_list` - -Optional: - -- `dimension` (String) Field usage depends on `service` value: - - Service `spotify_ads`: The dimension (entity_type) to sync. -- `fields` (Set of String) Field usage depends on `service` value: - - Service `spotify_ads`: A list of the fields (metrics) to sync. -- `granularity` (String) Field usage depends on `service` value: - - Service `spotify_ads`: The report granularity. -- `table` (String) Field usage depends on `service` value: - - Service `spotify_ads`: The table name within the schema to which connector will sync the data of the specific report. - - - -### Nested Schema for `config.reports` - -Optional: - -- `aggregation` (String) Field usage depends on `service` value: - - Service `google_search_console`: (Optional) Aggregation type. Supported only for the `SEARCH_RESULTS` report type -- `attributes` (Set of String) Field usage depends on `service` value: - - Service `google_search_ads_360`: The report attributes included to sync. -- `config_type` (String) Field usage depends on `service` value: - - Service `google_analytics`: Whether to use the [Prebuilt Reports or Custom Reports](https://fivetran.com/docs/connectors/applications/google-analytics#schemainformation). - - Service `google_analytics_4`: Whether to use the Prebuilt Reports or Custom Reports. -- `dimensions` (Set of String) Field usage depends on `service` value: - - Service `google_analytics`: The report dimensions to include into a sync. The `date` dimension is mandatory for all the report types. - - Service `google_analytics_4`: The report dimensions to include into a sync. - - Service `google_search_console`: The report dimensions included to sync. -- `fields` (Set of String) Field usage depends on `service` value: - - Service `google_ads`: A list of the fields to sync. Must be populated if `config_type` is set to `Custom`. -- `filter` (String) Field usage depends on `service` value: - - Service `google_analytics`: String parameter restricts the data returned for your report. To use the filter parameter, specify a dimension or metric on which to filter, followed by the filter expression -- `filter_field_name` (String) Field usage depends on `service` value: - - Service `google_analytics_4`: The dimension name to filter on. -- `filter_type` (String) Field usage depends on `service` value: - - Service `google_analytics_4`: Filter type for reports request. 
Possible values are INCLUDE and EXCLUDE -- `filter_value` (String) -- `metrics` (Set of String) Field usage depends on `service` value: - - Service `google_analytics`: The report metrics to include into a sync. - - Service `google_analytics_4`: The report metrics to include into a sync. - - Service `google_search_ads_360`: The report metrics included to sync. -- `prebuilt_report` (String) Field usage depends on `service` value: - - Service `google_analytics`: The name of the Prebuilt Report from which the connector will sync the data. - - Service `google_analytics_4`: The name of the Prebuilt Report from which the connector will sync the data. -- `report_type` (String) Field usage depends on `service` value: - - Service `google_ads`: The name of the Google Ads report from which the connector will sync the data. [Possible report_type values](https://developers.google.com/adwords/api/docs/appendix/reports#report-types). - - Service `google_search_ads_360`: The type of report - - Service `google_search_console`: The type of report -- `rollback_window` (Number) Field usage depends on `service` value: - - Service `google_analytics_4`: The custom window size for rollback syncs. -- `search_types` (Set of String) Field usage depends on `service` value: - - Service `google_search_console`: Search types included to sync. Supported only for the `SEARCH_RESULTS` report type -- `segment_ids` (Set of String) -- `segments` (Set of String) Field usage depends on `service` value: - - Service `google_analytics`: A segment is a subset of your Analytics data that is made up of one or more non-destructive filters (filters that do not alter the underlying data). Those filters isolate subsets of users, sessions, and hits. - - Service `google_search_ads_360`: The report segments included to sync. -- `table` (String) Field usage depends on `service` value: - - Service `google_ads`: The table name within the schema to which connector will sync the data of the specific report. - - Service `google_analytics`: The table name within the schema to which connector will sync the data of the specific report. - - Service `google_analytics_4`: The table name within the schema to which connector will sync the data of the specific report. - - Service `google_search_ads_360`: The name of a table within the schema to which connector syncs the data of a given report. - - Service `google_search_console`: The name of a table within the schema to which connector syncs the data of a given report. -- `time_aggregation_granularity` (String) Field usage depends on `service` value: - - Service `google_analytics_4`: The report data aggregation time granularity. - - - -### Nested Schema for `config.secrets_list` - -Optional: - -- `key` (String) Field usage depends on `service` value: - - Service `aws_lambda`: Secret Key. - - Service `azure_function`: Key - - Service `google_cloud_function`: Key -- `value` (String, Sensitive) Field usage depends on `service` value: - - Service `aws_lambda`: Secret Value. - - Service `azure_function`: Value - - Service `google_cloud_function`: Value - - - -### Nested Schema for `config.tenant_configs` - -Optional: - -- `subdomain` (String) Field usage depends on `service` value: - - Service `reltio`: Your Reltio subdomain. -- `tenant_id` (String) Field usage depends on `service` value: - - Service `reltio`: Your Reltio tenant ID. - - - - -### Nested Schema for `destination_schema` - -Optional: - -- `name` (String) The connector schema name in destination. Has to be unique within the group (destination). 
Required for connector creation. -- `prefix` (String) The connector schema prefix has to be unique within the group (destination). Each replicated schema is prefixed with the provided value. Required for connector creation. -- `table` (String) The table name, unique within the schema, to which the connector will sync the data. Required for connector creation. - - - -### Nested Schema for `timeouts` - -Optional: - -- `create` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). -- `update` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). - -## Import - -1. To import an existing `fivetran_connector` resource into your Terraform state, you need to get **Fivetran Connector ID** on the **Setup** tab of the connector page in your Fivetran dashboard. - -2. Retrieve all connectors in a particular group using the [fivetran_group_connectors data source](/docs/data-sources/group_connectors). To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups). - -3. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_connector" "my_imported_connector" { - -} -``` - -4. Run the `terraform import` command: - -``` -terraform import fivetran_connector.my_imported_connector {your Fivetran Connector ID} -``` - -5. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_connector.my_imported_connector' -``` -6. Copy the values and paste them to your `.tf` configuration. - --> The `config` object in the state contains all properties defined in the schema. You need to remove the properties from the `config` that are not related to your connector. See the [Fivetran REST API documentation](https://fivetran.com/docs/rest-api/connectors/config) for reference to find the properties you need to keep in the `config` section. - -## How to authorize a connector - -### GitHub connector example - -To authorize a GitHub connector via Terraform using a personal access token, specify `auth_mode`, `username`, and `pat` inside the `config` block instead of `auth`, and set `run_setup_tests` to `true`: - -```hcl -resource "fivetran_connector" "my_github_connector" { - group_id = "group_id" - service = "github" - run_setup_tests = "true" - - destination_schema { - name = "github_connector" - } - - config { - sync_mode = "AllRepositories" - use_webhooks = "false" - auth_mode = "PersonalAccessToken" - username = "git-hub-user-name" - pat = "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" - } -} -``` \ No newline at end of file diff --git a/docs/resources/connector_certificates.md b/docs/resources/connector_certificates.md deleted file mode 100644 index 87bf8da2..00000000 --- a/docs/resources/connector_certificates.md +++ /dev/null @@ -1,46 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "fivetran_connector_certificates Resource - terraform-provider-fivetran" -subcategory: "" -description: |- - ---- - -# fivetran_connector_certificates (Resource) - - - - - - -## Schema - -### Required - -- `connector_id` (String) The unique identifier for the target connection within the Fivetran system.
- -### Optional - -- `certificate` (Block Set) (see [below for nested schema](#nestedblock--certificate)) - -### Read-Only - -- `id` (String) The unique identifier for the resource. Equal to target connection id. - - -### Nested Schema for `certificate` - -Required: - -- `encoded_cert` (String, Sensitive) Base64 encoded certificate. -- `hash` (String) Hash of the certificate. - -Read-Only: - -- `name` (String) Certificate name. -- `public_key` (String) The SSH public key. -- `sha1` (String) Certificate sha1. -- `sha256` (String) Certificate sha256. -- `type` (String) Type of the certificate. -- `validated_by` (String) The name of the user who validated the certificate. -- `validated_date` (String) The date when the certificate was approved. diff --git a/docs/resources/connector_fingerprints.md b/docs/resources/connector_fingerprints.md deleted file mode 100644 index 64d33b6d..00000000 --- a/docs/resources/connector_fingerprints.md +++ /dev/null @@ -1,79 +0,0 @@ ---- -page_title: "Resource: fivetran_connector_fingerprints" ---- - -# Resource: fivetran_connector_fingerprints - -This resource allows you to manage the list of approved SSH fingerprints for a particular connector. - -## Example Usage - -```hcl -resource "fivetran_connector_fingerprints" "my_connector_approved_fingerprints" { - connector_id = fivetran_connector.my_connector.id - fingerprint { - hash = "jhgfJfgrI6yy..." - public_key = "ssh-rsa CCCCB3NzaC1yc2ECCASFWFWDFRWT5WAS ... fivetran user key" - } - fingerprint { - hash = "eUtPirI6yytWe..." - public_key = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC6 ... fivetran user key" - } -} -``` - - -## Schema - -### Required - -- `connector_id` (String) The unique identifier for the target connection within the Fivetran system. - -### Optional - -- `fingerprint` (Block Set) (see [below for nested schema](#nestedblock--fingerprint)) - -### Read-Only - -- `id` (String) The unique identifier for the resource. Equal to target connection id. - - -### Nested Schema for `fingerprint` - -Required: - -- `hash` (String) Hash of the fingerprint. -- `public_key` (String) The SSH public key. - -Read-Only: - -- `validated_by` (String) The name of the user who validated the fingerprint. -- `validated_date` (String) The date when the fingerprint was approved. - -## Import - -1. To import an existing `fivetran_connector_fingerprints` resource into your Terraform state, you need to get **Fivetran Connector ID** on the **Setup** tab of the connector page in your Fivetran dashboard. - -2. Retrieve all connectors in a particular group using the [fivetran_group_connectors data source](/docs/data-sources/group_connectors). To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups). - -3. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_connector_fingerprints" "my_imported_connector_fingerprints" { - -} -``` - -4. Run the `terraform import` command: - -``` -terraform import fivetran_connector_fingerprints.my_imported_connector_fingerprints {your Fivetran Connector ID} -``` - -5. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_connector_fingerprints.my_imported_connector_fingerprints' -``` - -6. Copy the values and paste them to your `.tf` configuration.
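For reference, after step 6 the filled-in resource might look like the following minimal sketch (the connector ID, hash, and public key below are placeholders, not real values):

```hcl
resource "fivetran_connector_fingerprints" "my_imported_connector_fingerprints" {
    connector_id = "my_connector_id"
    fingerprint {
        hash       = "jhgfJfgrI6yy..."
        public_key = "ssh-rsa CCCCB3NzaC1yc2E... fivetran user key"
    }
}
```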
\ No newline at end of file diff --git a/docs/resources/connector_schedule.md deleted file mode 100644 index b467da70..00000000 --- a/docs/resources/connector_schedule.md +++ /dev/null @@ -1,73 +0,0 @@ ---- -page_title: "Resource: fivetran_connector_schedule" ---- - -# Resource: fivetran_connector_schedule - -This resource allows you to manage the connector schedule: pause/unpause the connector, and set `daily_sync_time` and `sync_frequency`. - -## Example Usage - -```hcl -resource "fivetran_connector_schedule" "my_connector_schedule" { - connector_id = fivetran_connector.my_connector.id - - sync_frequency = "1440" - daily_sync_time = "03:00" - - paused = false - pause_after_trial = true - - schedule_type = "auto" -} -``` - - -## Schema - -### Required - -- `connector_id` (String) The unique identifier for the connector within the Fivetran system. - -### Optional - -- `daily_sync_time` (String) The optional parameter that defines the sync start time when the sync frequency is already set or being set by the current request to 1440. It can be specified in one hour increments starting from 00:00 to 23:00. If not specified, we will use [the baseline sync start time](https://fivetran.com/docs/getting-started/syncoverview#syncfrequencyandscheduling). This parameter has no effect on the [0 to 60 minutes offset](https://fivetran.com/docs/getting-started/syncoverview#syncstarttimesandoffsets) used to determine the actual sync start time. -- `pause_after_trial` (String) Specifies whether the connector should be paused after the free trial period has ended. -- `paused` (String) Specifies whether the connector is paused. -- `schedule_type` (String) The connector schedule configuration type. Supported values: auto, manual. -- `sync_frequency` (String) The connector sync frequency in minutes. Supported values: 1, 5, 15, 30, 60, 120, 180, 360, 480, 720, 1440. - -### Read-Only - -- `id` (String) The unique resource identifier (equals to `connector_id`). - -## Import - -You don't need to import this resource as it is synthetic. - -To fetch schedule values from an existing connector, use the `fivetran_connector` data source: -```hcl -data "fivetran_connector" "my_connector" { - id = "my_connector_id" -} - -# now you can use schedule values from this data_source: -# sync_frequency = data.fivetran_connector.my_connector.sync_frequency -# paused = data.fivetran_connector.my_connector.paused -``` - -This resource manages settings for an already existing connector instance and doesn't create a new one. -If you already have an existing connector with id = `my_connector_id`, just define a `fivetran_connector_schedule` resource: - -```hcl -resource "fivetran_connector_schedule" "my_connector_schedule" { - connector_id = "my_connector_id" - - sync_frequency = "360" - paused = false - pause_after_trial = true - schedule_type = "auto" -} -``` - --> NOTE: You can't have several resources managing the same `connector_id`. They will be in conflict after each `apply`.
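Note that `daily_sync_time` only takes effect when `sync_frequency` is set to `"1440"`. A minimal sketch of a sub-daily schedule, assuming a connector with id `my_connector_id` already exists:

```hcl
resource "fivetran_connector_schedule" "my_hourly_schedule" {
    connector_id   = "my_connector_id"
    sync_frequency = "60" # hourly; daily_sync_time is not applicable here
    paused         = false
    schedule_type  = "auto"
}
```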
\ No newline at end of file diff --git a/docs/resources/connector_schema_config.md deleted file mode 100644 index e465ed2c..00000000 --- a/docs/resources/connector_schema_config.md +++ /dev/null @@ -1,280 +0,0 @@ - --- -page_title: "Resource: fivetran_connector_schema_config" ---- - -# Resource: fivetran_connector_schema_config - -This resource allows you to manage the Standard Configuration settings of a connector: - - Define the schema change handling settings - - Enable and disable schemas, tables, and columns - -The resource is in **ALPHA** state. The resource schema and behavior are subject to change without prior notice. - -Known issues: - - Definition of `sync_mode` for a table may cause infinitely drifting changes in the plan. - - Using the `schema` field causes very slow plan preparation because of the slow performance of SetTypable fields in terraform-framework; please use the MapTypable `schemas` field instead. - -## Usage guide - -Note that all configuration settings are aligned to the `schema_change_handling` settings, except the settings explicitly specified in `schemas`. -In `schemas`, you only override the default settings defined by the chosen `schema_change_handling` option. -The allowed `schema_change_handling` options are as follows: -- `ALLOW_ALL` - all schemas, tables, and columns are ENABLED by default. You only need to explicitly specify DISABLED items or hashed columns. -- `BLOCK_ALL` - all schemas, tables, and columns are DISABLED by default; the configuration only specifies ENABLED items. -- `ALLOW_COLUMNS` - all schemas and tables are DISABLED by default, but all columns are ENABLED by default; the configuration specifies ENABLED schemas and tables, and DISABLED columns. - -Note that system-enabled tables and columns (such as primary and foreign key columns, and [system tables and columns](https://fivetran.com/docs/getting-started/system-columns-and-tables)) are synced regardless of the `schema_change_handling` settings and configuration. You can only [disable non-locked columns in the system-enabled tables](#nestedblock--nonlocked). If the configuration specifies any system tables or locked system table columns as disabled (`enabled = "false"`), the provider just ignores these statements. - -## Usage examples - -### Example for the ALLOW_ALL option - -In `schemas`, you only need to specify schemas and tables you want to disable (`enabled = "false"`) and columns you want to disable or hash (`hashed = "true"`).
- -```hcl -resource "fivetran_connector_schema_config" "schema" { - connector_id = "connector_id" - schema_change_handling = "ALLOW_ALL" - schemas = { - "schema_name" = { - tables = { - "table_name" = { - columns = { - "hashed_column_name" = { - hashed = true - } - "blocked_column_name" = { - enabled = false - } - } - } - "blocked_table_name" = { - enabled = false - } - } - } - "blocked_schema" = { - enabled = false - } - } -} -``` - -The configuration resulting from the example request is as follows: -- All new and existing schemas except `blocked_schema` are enabled -- All new and existing tables in the `schema_name` schema except the `blocked_table_name` table are enabled -- All new and existing columns in the `table_name` table of the `schema_name` schema except the `blocked_column_name` column are enabled -- The `hashed_column_name` column is hashed in the `table_name` table in the `schema_name` schema -- All new schemas, tables, and columns are enabled once captured by the connector during the sync except those disabled by the system - -### Example for the BLOCK_ALL option - -```hcl -resource "fivetran_connector_schema_config" "schema" { - connector_id = "connector_id" - schema_change_handling = "BLOCK_ALL" - schemas = { - "schema_name" = { - tables = { - "table_name" = { - columns = { - "hashed_column_name" = { - hashed = true - } - } - } - "enabled_table_name" = { - enabled = true - } - } - } - "enabled_schema" = { - enabled = true - } - } -} -``` - -The configuration resulting from the example request is as follows: - -- All new and existing schemas except the `enabled_schema` and `schema_name` are disabled -- Only system-enabled tables and columns are enabled in the `enabled_schema` schema -- All new and existing tables in the `schema_name` schema except the `enabled_table_name`, `table_name` tables and system tables are disabled -- All new and existing columns in the `table_name` table of the `schema_name` schema are disabled except the `hashed_column_name` column and system columns -- The `hashed_column_name` column in the `table_name` table in the `schema_name` schema is hashed -- All new schemas, tables, and columns, except the system-enabled ones, are disabled once captured by the connector during the sync - -### Example for the ALLOW_COLUMNS option - -In `schemas`, you only need to specify schemas and tables you want to enable (`enabled = "true"`) and columns you want to disable (`enabled = "false"`) or hash (`hashed = "true"`).
- -```hcl -resource "fivetran_connector_schema_config" "schema" { - connector_id = "connector_id" - schema_change_handling = "ALLOW_COLUMNS" - schemas = { - "schema_name" = { - tables = { - "table_name" = { - columns = { - "hashed_column_name" = { - hashed = true - } - "disabled_column_name" = { - enabled = false - } - } - } - "enabled_table" = { - enabled = true - } - } - } - "enabled_schema_name" = { - enabled = true - } - } -} -``` - -The configuration resulting from the example request is as follows: - -- All specified existing schemas and tables are enabled and all columns inside them are enabled by default, unless `enabled = "false"` is specified for the column -- All new and existing schemas except the `enabled_schema_name` and `schema_name` are disabled -- Only system-enabled tables and columns are enabled in the `enabled_schema_name` schema -- All new and existing tables in the `schema_name` schema except the `enabled_table`, `table_name` and system-enabled tables are disabled -- All new and existing columns in the `table_name` table of the `schema_name` schema except the `disabled_column_name` column and system-enabled columns are enabled -- The `hashed_column_name` column is hashed in the `table_name` table in the `schema_name` schema -- All new non-system-enabled schemas and tables are disabled once captured by the connector during the sync -- All new non-system-enabled columns inside enabled tables (including system-enabled tables) are enabled once captured by the connector during the sync - - -### Non-locked table column management in system-enabled tables - -You cannot manage system-enabled tables, but you can manage their non-locked columns. For example, your schema `schema_name` has a system-enabled table `system_enabled_table` that can't be disabled, and you want to disable one of its columns named `column_name`: - -```hcl -resource "fivetran_connector_schema_config" "schema" { - connector_id = "connector_id" - schema_change_handling = "ALLOW_COLUMNS" - schemas = { - "schema_name" = { - tables = { - "system_enabled_table" = { - columns = { - "column_name" = { - enabled = false - } - } - } - } - } - } -} -``` -```
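For very large configurations, the `schemas_json` attribute (see the Schema section below) accepts the same settings as a JSON string following the Fivetran API contract for the `schemas` field. A minimal sketch, assuming the same hypothetical connector; the exact payload shape is defined by the Fivetran REST API, and `jsonencode` keeps it readable:

```hcl
resource "fivetran_connector_schema_config" "schema" {
    connector_id           = "connector_id"
    schema_change_handling = "ALLOW_COLUMNS"
    schemas_json = jsonencode({
        schema_name = {
            enabled = true
            tables = {
                table_name = {
                    enabled = true
                }
            }
        }
    })
}
```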
- - -## Schema - -### Required - -- `connector_id` (String) The unique identifier for the connector within the Fivetran system. - -### Optional - -- `schema` (Block Set, Deprecated) (see [below for nested schema](#nestedblock--schema)) -- `schema_change_handling` (String) The value specifying how new source data is handled. -- `schemas` (Attributes Map) Map of schema configurations. (see [below for nested schema](#nestedatt--schemas)) -- `schemas_json` (String) Schema settings in JSON format, following the Fivetran API endpoint contract for the `schemas` field (a map of schemas). -- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts)) -- `validation_level` (String) The value defines the validation method. -- NONE: no validation, any configuration accepted. -- TABLES: validate table names, fail on attempt to configure non-existing schemas/tables. -- COLUMNS: validate the whole schema config including column names. The resource will try to fetch columns for every configured table and verify column names. - -### Read-Only - -- `id` (String) The unique resource identifier (equals to `connector_id`). - - -### Nested Schema for `schema` - -Required: - -- `name` (String) The schema name within your destination in accordance with Fivetran conventional rules. - -Optional: - -- `enabled` (Boolean) The boolean value specifying whether the sync for the schema into the destination is enabled. -- `table` (Block Set) (see [below for nested schema](#nestedblock--schema--table)) - - -### Nested Schema for `schema.table` - -Required: - -- `name` (String) The table name within your destination in accordance with Fivetran conventional rules. - -Optional: - -- `column` (Block Set) (see [below for nested schema](#nestedblock--schema--table--column)) -- `enabled` (Boolean) The boolean value specifying whether the sync of the table into the destination is enabled. -- `sync_mode` (String) This field appears in the response if the connector supports switching sync modes for tables. - - -### Nested Schema for `schema.table.column` - -Required: - -- `name` (String) The column name within your destination in accordance with Fivetran conventional rules. - -Optional: - -- `enabled` (Boolean) The boolean value specifying whether the sync of the column into the destination is enabled. -- `hashed` (Boolean) The boolean value specifying whether a column should be hashed. -- `is_primary_key` (Boolean) - - - - - -### Nested Schema for `schemas` - -Optional: - -- `enabled` (Boolean) The boolean value specifying whether the sync for the schema into the destination is enabled. -- `tables` (Attributes Map) Map of table configurations. (see [below for nested schema](#nestedatt--schemas--tables)) - - -### Nested Schema for `schemas.tables` - -Optional: - -- `columns` (Attributes Map) Map of column configurations. (see [below for nested schema](#nestedatt--schemas--tables--columns)) -- `enabled` (Boolean) The boolean value specifying whether the sync for the table into the destination is enabled. -- `sync_mode` (String) This field appears in the response if the connector supports switching sync modes for tables. - - -### Nested Schema for `schemas.tables.columns` - -Optional: - -- `enabled` (Boolean) The boolean value specifying whether the sync of the column into the destination is enabled. -- `hashed` (Boolean) The boolean value specifying whether a column should be hashed. -- `is_primary_key` (Boolean) - - - - - -### Nested Schema for `timeouts` - -Optional: - -- `create` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). -- `read` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Read operations occur during any refresh or planning operation when refresh is enabled. -- `update` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). - -## Import - -You don't need to import this resource as it is synthetic (doesn't create new instances in upstream).
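Setting `validation_level` (described in the Schema section above) can surface typos in schema and table names at plan/apply time; a minimal sketch, assuming a hypothetical connector. Note that `COLUMNS` validation fetches columns for every configured table and is therefore slower than `TABLES`:

```hcl
resource "fivetran_connector_schema_config" "schema" {
    connector_id           = "connector_id"
    schema_change_handling = "ALLOW_COLUMNS"
    validation_level       = "TABLES" # fail on attempts to configure non-existing schemas/tables
    schemas = {
        "schema_name" = {
            tables = {
                "table_name" = { enabled = true }
            }
        }
    }
}
```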
\ No newline at end of file diff --git a/docs/resources/dbt_git_project_config.md deleted file mode 100644 index 81f29780..00000000 --- a/docs/resources/dbt_git_project_config.md +++ /dev/null @@ -1,64 +0,0 @@ ---- -page_title: "Resource: fivetran_dbt_git_project_config" ---- - -# Resource: fivetran_dbt_git_project_config - -Resource is in ALPHA state. - -This resource allows you to add and manage dbt Git project configurations. - -## Example Usage - -```hcl -resource "fivetran_dbt_git_project_config" "git_project_config" { - project_id = "project_id" - git_remote_url = "your_git_remote_url" - git_branch = "main" - folder_path = "/dbt/project/folder/path" -} -``` - - -## Schema - -### Required - -- `project_id` (String) The unique identifier for the dbt Project within the Fivetran system. - -### Optional - -- `ensure_readiness` (Boolean) Specifies whether the resource should wait for the project to finish initialization. Default value: false. -- `folder_path` (String) Folder in Git repo with your dbt project. -- `git_branch` (String) Git branch. -- `git_remote_url` (String) Git remote URL with your dbt project. - -### Read-Only - -- `id` (String) The unique identifier for the dbt Project within the Fivetran system. - -## Import - -1. To import an existing `fivetran_dbt_git_project_config` resource into your Terraform state, you need to get **Dbt Project ID** via API call `GET https://api.fivetran.com/v1/dbt/projects` to retrieve available projects. -2. Fetch project details for a particular `project-id` using `GET https://api.fivetran.com/v1/dbt/projects/{project-id}` to ensure that this is the project you want to import. -3. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_dbt_git_project_config" "my_imported_fivetran_dbt_git_project_config" { - -} -``` - -4. Run the `terraform import` command: - -``` -terraform import fivetran_dbt_git_project_config.my_imported_fivetran_dbt_git_project_config {Dbt Project ID} -``` - -5. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_dbt_git_project_config.my_imported_fivetran_dbt_git_project_config' -``` - -6. Copy the values and paste them to your `.tf` configuration.
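In practice, `project_id` usually comes from a `fivetran_dbt_project` resource managed in the same configuration; a minimal sketch, assuming such a project resource exists (the remote URL is a placeholder):

```hcl
resource "fivetran_dbt_git_project_config" "git_project_config" {
    project_id     = fivetran_dbt_project.project.id
    git_remote_url = "git@github.com:your-org/your-dbt-repo.git"
    git_branch     = "main"
}
```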
Example: "DBT_VARIABLE=variable_value" -- `project_config` (Block, Optional, Deprecated) (see [below for nested schema](#nestedblock--project_config)) -- `target_name` (String) Target name to set or override the value from the deployment.yaml -- `threads` (Number) The number of threads dbt will use (from 1 to 32). Make sure this value is compatible with your destination type. For example, Snowflake supports only 8 concurrent queries on an X-Small warehouse. -- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts)) -- `type` (String) Type of dbt Project. Currently only `GIT` supported. Empty value will be considered as default (GIT). - -### Read-Only - -- `created_at` (String) The timestamp of the dbt Project creation. -- `created_by_id` (String) The unique identifier for the User within the Fivetran system who created the dbt Project. -- `id` (String) The unique identifier for the dbt Project within the Fivetran system. -- `models` (Attributes Set) (see [below for nested schema](#nestedatt--models)) -- `public_key` (String) Public key to grant Fivetran SSH access to git repository. -- `status` (String) Status of dbt Project (NOT_READY, READY, ERROR). - - -### Nested Schema for `project_config` - -Optional: - -- `folder_path` (String) Folder in Git repo with your dbt project -- `git_branch` (String) Git branch -- `git_remote_url` (String) Git remote URL with your dbt project - - - -### Nested Schema for `timeouts` - -Optional: - -- `create` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). - - - -### Nested Schema for `models` - -Read-Only: - -- `id` (String) The unique identifier for the dbt Model within the Fivetran system. -- `model_name` (String) The dbt Model name. -- `scheduled` (Boolean) Boolean specifying whether the model is selected for execution in the dashboard. - -## Import - -1. To import an existing `fivetran_dbt_project` resource into your Terraform state, you need to get **Dbt Project ID** via API call `GET https://api.fivetran.com/v1/dbt/projects` to retrieve available projects. -2. Fetch project details for particular `project-id` using `GET https://api.fivetran.com/v1/dbt/projects/{project-id}` to ensure that this is the project you want to import. -3. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_dbt_project" "my_imported_fivetran_dbt_project" { - -} -``` - -4. Run the `terraform import` command: - -``` -terraform import fivetran_dbt_project.my_imported_fivetran_dbt_project {Dbt Project ID} -``` - -4. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_dbt_project.my_imported_fivetran_dbt_project' -``` - -5. Copy the values and paste them to your `.tf` configuration. \ No newline at end of file diff --git a/docs/resources/dbt_transformation.md b/docs/resources/dbt_transformation.md deleted file mode 100644 index 98845ee1..00000000 --- a/docs/resources/dbt_transformation.md +++ /dev/null @@ -1,93 +0,0 @@ ---- -page_title: "Resource: fivetran_dbt_transformation" ---- - -# Resource: fivetran_dbt_transformation - -Resource is in ALPHA state. - -This resource allows you to add, manage and delete dbt Transformations for existing dbt Model. 
\ No newline at end of file diff --git a/docs/resources/dbt_transformation.md deleted file mode 100644 index 98845ee1..00000000 --- a/docs/resources/dbt_transformation.md +++ /dev/null @@ -1,93 +0,0 @@ ---- -page_title: "Resource: fivetran_dbt_transformation" ---- - -# Resource: fivetran_dbt_transformation - -Resource is in ALPHA state. - -This resource allows you to add, manage and delete dbt Transformations for an existing dbt Model. -To retrieve available dbt Models, use the [Retrieve dbt Project models](https://fivetran.com/docs/rest-api/dbt-transformation-management#retrievedbtprojectmodels) endpoint. - -## Example Usage - -```hcl -resource "fivetran_dbt_transformation" "transformation" { - dbt_model_name = "dbt_model_name" - dbt_project_id = "dbt_project_id" - run_tests = "false" - paused = "false" - schedule { - schedule_type = "TIME_OF_DAY" - time_of_day = "12:00" - days_of_week = ["MONDAY", "SATURDAY"] - } -} -``` - - -## Schema - -### Required - -- `dbt_model_name` (String) Target dbt Model name. -- `dbt_project_id` (String) The unique identifier for the dbt Project within the Fivetran system. - -### Optional - -- `paused` (Boolean) The field indicating whether the transformation will be set into the paused state. By default, the value is false. -- `run_tests` (Boolean) The field indicating whether the tests have been configured for dbt Transformation. By default, the value is false. -- `schedule` (Block, Optional) (see [below for nested schema](#nestedblock--schedule)) -- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts)) - -### Read-Only - -- `connector_ids` (Set of String) Identifiers of related connectors. -- `created_at` (String) The timestamp of the dbt Transformation creation. -- `dbt_model_id` (String) The unique identifier for the dbt Model within the Fivetran system. -- `id` (String) The unique identifier for the dbt Transformation within the Fivetran system. -- `model_ids` (Set of String) Identifiers of related models. -- `output_model_name` (String) The dbt Model name. - - -### Nested Schema for `schedule` - -Optional: - -- `days_of_week` (Set of String) The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. -- `interval` (Number) The time interval in minutes between subsequent transformation runs. -- `schedule_type` (String) The type of the schedule to run the dbt Transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL. For the INTEGRATED schedule type, the interval and time_of_day values are ignored and only the days_of_week parameter values are taken into account (but may be empty or null). For the TIME_OF_DAY schedule type, the interval parameter value is ignored and the time_of_day value is taken into account along with the days_of_week value. For the INTERVAL schedule type, the time_of_day value is ignored and the interval parameter value is taken into account along with the days_of_week value. -- `time_of_day` (String) The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00"
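For instance, with the INTERVAL schedule type only the `interval` and `days_of_week` values are taken into account; a minimal sketch (the model and project IDs are placeholders):

```hcl
resource "fivetran_dbt_transformation" "interval_transformation" {
    dbt_model_name = "dbt_model_name"
    dbt_project_id = "dbt_project_id"
    schedule {
        schedule_type = "INTERVAL"
        interval      = 60 # run every 60 minutes
        days_of_week  = ["MONDAY", "WEDNESDAY", "FRIDAY"]
    }
}
```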
- - - -### Nested Schema for `timeouts` - -Optional: - -- `create` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). - -## Import - -1. To import an existing `fivetran_dbt_transformation` resource into your Terraform state, you need to get **Transformation ID** on the transformation page in your Fivetran dashboard. -2. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_dbt_transformation" "my_imported_fivetran_dbt_transformation" { - -} -``` - -3. Run the `terraform import` command: - -``` -terraform import fivetran_dbt_transformation.my_imported_fivetran_dbt_transformation {Transformation ID} -``` - -4. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_dbt_transformation.my_imported_fivetran_dbt_transformation' -``` - -5. Copy the values and paste them to your `.tf` configuration. \ No newline at end of file diff --git a/docs/resources/destination.md deleted file mode 100644 index 9030064a..00000000 --- a/docs/resources/destination.md +++ /dev/null @@ -1,486 +0,0 @@ ---- -page_title: "Resource: fivetran_destination" ---- - -# Resource: fivetran_destination - -This resource allows you to create, update, and delete destinations. - -IMPORTANT: Groups and destinations are mapped 1:1 to each other. We do this mapping using the group's id value that we automatically generate when you create a group using our REST API, and the destination's group_id value that you specify when you create a destination using our REST API. This means that if you use our REST API to create a destination, you must create a group in your Fivetran account before you can create a destination in it. - -When you create a destination in your Fivetran dashboard, we automatically create a group, assign a value to its id, and create a destination with the same group_id value, which is unique in your Fivetran account. The group's name corresponds to the Destination name you specify in your Fivetran dashboard when creating the destination. - -## Example Usage - -```hcl -resource "fivetran_destination" "dest" { - group_id = fivetran_group.group.id - service = "postgres_rds_warehouse" - time_zone_offset = "0" - region = "GCP_US_EAST4" - trust_certificates = "true" - trust_fingerprints = "true" - daylight_saving_time_enabled = "true" - run_setup_tests = "true" - - config { - host = "destination.fqdn" - port = 5432 - user = "postgres" - password = "myPass" - database = "fivetran" - connection_type = "Directly" - } -} -``` - - -## Schema - -### Required - -- `group_id` (String) The unique identifier for the Group within the Fivetran system. - `region` (String) Data processing location. This is where Fivetran will operate and run computation on data. -- `service` (String) The destination type id within the Fivetran system. -- `time_zone_offset` (String) Determines the time zone for the Fivetran sync schedule. - -### Optional - -- `config` (Block, Optional) (see [below for nested schema](#nestedblock--config)) -- `daylight_saving_time_enabled` (Boolean) Shift my UTC offset with daylight savings time (US Only) -- `hybrid_deployment_agent_id` (String) The hybrid deployment agent ID that refers to the controller created for the group the connection belongs to. If the value is specified, the system will try to associate the connection with an existing agent. -- `local_processing_agent_id` (String, Deprecated) (Deprecated) The hybrid deployment agent ID that refers to the controller created for the group the connection belongs to. If the value is specified, the system will try to associate the connection with an existing agent. -- `networking_method` (String) Possible values: Directly, SshTunnel, ProxyAgent. -- `private_link_id` (String) The private link ID.
-- `run_setup_tests` (Boolean) Specifies whether the setup tests should be run automatically. The default value is TRUE. -- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts)) -- `trust_certificates` (Boolean) Specifies whether we should trust the certificate automatically. The default value is FALSE. If a certificate is not trusted automatically, it has to be approved with [Certificates Management API Approve a destination certificate](https://fivetran.com/docs/rest-api/certificates#approveadestinationcertificate). -- `trust_fingerprints` (Boolean) Specifies whether we should trust the SSH fingerprint automatically. The default value is FALSE. If a fingerprint is not trusted automatically, it has to be approved with [Certificates Management API Approve a destination fingerprint](https://fivetran.com/docs/rest-api/certificates#approveadestinationfingerprint). - -### Read-Only - -- `id` (String) The unique identifier for the destination within the Fivetran system. -- `setup_status` (String) Destination setup status. - - -### Nested Schema for `config` - -Optional: - -- `always_encrypted` (Boolean) Field usage depends on `service` value: - - Service `aurora_postgres_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `aurora_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_postgres_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_sql_data_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_sql_database`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_sql_managed_db_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `maria_rds_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `maria_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `mysql_rds_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `mysql_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `panoply`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `periscope_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `postgres_gcp_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `postgres_rds_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `postgres_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `redshift`: Require TLS through Tunnel - - Service `sql_server_rds_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `sql_server_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. 
-- `auth` (String) Field usage depends on `service` value: - - Service `snowflake`: Password-based or key-based authentication type -- `auth_type` (String) Field usage depends on `service` value: - - Service `databricks`: Authentication type - - Service `redshift`: Authentication type. Default value: `PASSWORD`. -- `bootstrap_servers` (Set of String) Field usage depends on `service` value: - - Service `confluent_cloud_wh`: Comma-separated list of Confluent Cloud servers in the `server:port` format. -- `bucket` (String) Field usage depends on `service` value: - - Service `big_query`: Customer bucket. If specified, your GCS bucket will be used to process the data instead of a Fivetran-managed bucket. The bucket must be present in the same location as the dataset location. - - Service `big_query_dts`: Customer bucket. If specified, your GCS bucket will be used to process the data instead of a Fivetran-managed bucket. The bucket must be present in the same location as the dataset location. - - Service `managed_big_query`: Customer bucket. If specified, your GCS bucket will be used to process the data instead of a Fivetran-managed bucket. The bucket must be present in the same location as the dataset location. - - Service `new_s3_datalake`: The name of the bucket to be used as destination -- `catalog` (String) Field usage depends on `service` value: - - Service `databricks`: Catalog name -- `client_id` (String) Field usage depends on `service` value: - - Service `adls`: Client id of service principal - - Service `onelake`: Client ID of service principal -- `cloud_provider` (String) Field usage depends on `service` value: - - Service `databricks`: Databricks deployment cloud -- `cluster_id` (String) Field usage depends on `service` value: - - Service `panoply`: Cluster ID. - - Service `periscope_warehouse`: Cluster ID. - - Service `redshift`: Cluster ID. Must be populated if `connection_type` is set to `SshTunnel` and `auth_type` is set to `IAM`. -- `cluster_region` (String) Field usage depends on `service` value: - - Service `panoply`: Cluster region. - - Service `periscope_warehouse`: Cluster region. - - Service `redshift`: Cluster region. Must be populated if `connection_type` is set to `SshTunnel` and `auth_type` is set to `IAM`. -- `connection_method` (String) -- `connection_type` (String) Field usage depends on `service` value: - - Service `adls`: Connection method. Default value: `Directly`. - - Service `aurora_postgres_warehouse`: Connection method. Default value: `Directly`. - - Service `aurora_warehouse`: Connection method. Default value: `Directly`. - - Service `azure_postgres_warehouse`: Connection method. Default value: `Directly`. - - Service `azure_sql_data_warehouse`: Connection method. Default value: `Directly`. - - Service `azure_sql_database`: Connection method. Default value: `Directly`. - - Service `azure_sql_managed_db_warehouse`: Connection method. Default value: `Directly`. - - Service `databricks`: Connection method. Default value: `Directly`. - - Service `maria_rds_warehouse`: Connection method. Default value: `Directly`. - - Service `maria_warehouse`: Connection method. Default value: `Directly`. - - Service `mysql_rds_warehouse`: Connection method. Default value: `Directly`. - - Service `mysql_warehouse`: Connection method. Default value: `Directly`. - - Service `panoply`: Connection method. Default value: `Directly`. - - Service `periscope_warehouse`: Connection method. Default value: `Directly`. - - Service `postgres_gcp_warehouse`: Connection method. 
Default value: `Directly`. - - Service `postgres_rds_warehouse`: Connection method. Default value: `Directly`. - - Service `postgres_warehouse`: Connection method. Default value: `Directly`. - - Service `redshift`: Connection method. Default value: `Directly`. - - Service `snowflake`: Connection method. Default value: `Directly`. - - Service `sql_server_rds_warehouse`: Connection method. Default value: `Directly`. - - Service `sql_server_warehouse`: Connection method. Default value: `Directly`. -- `container_name` (String) Field usage depends on `service` value: - - Service `adls`: Container to store delta table files - - Service `onelake`: Workspace name to store delta table files -- `controller_id` (String) -- `create_external_tables` (Boolean) Field usage depends on `service` value: - - Service `databricks`: Whether to create external tables -- `data_format` (String) Field usage depends on `service` value: - - Service `confluent_cloud_wh`: Confluent Cloud message format. -- `data_set_location` (String) Field usage depends on `service` value: - - Service `big_query`: Data location. Datasets will reside in this location. - - Service `big_query_dts`: Data location. Datasets will reside in this location. - - Service `managed_big_query`: Data location. Datasets will reside in this location. -- `database` (String) Field usage depends on `service` value: - - Service `aurora_postgres_warehouse`: Database name - - Service `aurora_warehouse`: Database name - - Service `azure_postgres_warehouse`: Database name - - Service `azure_sql_data_warehouse`: Database name - - Service `azure_sql_database`: Database name - - Service `azure_sql_managed_db_warehouse`: Database name - - Service `maria_rds_warehouse`: Database name - - Service `maria_warehouse`: Database name - - Service `mysql_rds_warehouse`: Database name - - Service `mysql_warehouse`: Database name - - Service `panoply`: Database name - - Service `periscope_warehouse`: Database name - - Service `postgres_gcp_warehouse`: Database name - - Service `postgres_rds_warehouse`: Database name - - Service `postgres_warehouse`: Database name - - Service `redshift`: Database name - - Service `snowflake`: Database name - - Service `sql_server_rds_warehouse`: Database name - - Service `sql_server_warehouse`: Database name -- `enable_remote_execution` (Boolean) -- `external_location` (String) Field usage depends on `service` value: - - Service `databricks`: External location to store Delta tables. Default value: `""` (null). By default, the external tables will reside in the `/{schema}/{table}` path, and if you specify an external location in the `{externalLocation}/{schema}/{table}` path. 
-- `fivetran_glue_role_arn` (String) -- `fivetran_msk_role_arn` (String) -- `fivetran_role_arn` (String) Field usage depends on `service` value: - - Service `new_s3_datalake`: ARN of the role which you created with different required policy mentioned in our setup guide -- `host` (String) Field usage depends on `service` value: - - Service `aurora_postgres_warehouse`: Server name - - Service `aurora_warehouse`: Server name - - Service `azure_postgres_warehouse`: Server name - - Service `azure_sql_data_warehouse`: Server name - - Service `azure_sql_database`: Server name - - Service `azure_sql_managed_db_warehouse`: Server name - - Service `maria_rds_warehouse`: Server name - - Service `maria_warehouse`: Server name - - Service `mysql_rds_warehouse`: Server name - - Service `mysql_warehouse`: Server name - - Service `panoply`: Server name - - Service `periscope_warehouse`: Server name - - Service `postgres_gcp_warehouse`: Server name - - Service `postgres_rds_warehouse`: Server name - - Service `postgres_warehouse`: Server name - - Service `redshift`: Server name - - Service `snowflake`: Server name - - Service `sql_server_rds_warehouse`: Server name - - Service `sql_server_warehouse`: Server name -- `http_path` (String) Field usage depends on `service` value: - - Service `databricks`: HTTP path -- `is_private_key_encrypted` (Boolean) Field usage depends on `service` value: - - Service `snowflake`: Indicates that a private key is encrypted. The default value: `false`. The field can be specified if authentication type is `KEY_PAIR`. -- `is_private_link_required` (Boolean) Field usage depends on `service` value: - - Service `new_s3_datalake`: We use PrivateLink by default if your s3 bucket is in the same region as Fivetran. Turning on this toggle ensures that Fivetran always connects to s3 bucket over PrivateLink. Learn more in our [PrivateLink documentation](https://fivetran.com/docs/connectors/databases/connection-options#awsprivatelinkbeta). -- `is_redshift_serverless` (Boolean) Field usage depends on `service` value: - - Service `redshift`: Is your destination Redshift Serverless -- `lakehouse_name` (String) Field usage depends on `service` value: - - Service `onelake`: Name of your lakehouse -- `msk_sts_region` (String) -- `num_of_partitions` (Number) Field usage depends on `service` value: - - Service `confluent_cloud_wh`: Number of partitions per topic. -- `oauth2_client_id` (String) Field usage depends on `service` value: - - Service `databricks`: OAuth 2.0 client ID -- `oauth2_secret` (String, Sensitive) Field usage depends on `service` value: - - Service `databricks`: OAuth 2.0 secret -- `passphrase` (String, Sensitive) Field usage depends on `service` value: - - Service `snowflake`: In case private key is encrypted, you are required to enter passphrase that was used to encrypt the private key. The field can be specified if authentication type is `KEY_PAIR`. 
-- `password` (String, Sensitive) Field usage depends on `service` value: - - Service `aurora_postgres_warehouse`: Database user password - - Service `aurora_warehouse`: Database user password - - Service `azure_postgres_warehouse`: Database user password - - Service `azure_sql_data_warehouse`: Database user password - - Service `azure_sql_database`: Database user password - - Service `azure_sql_managed_db_warehouse`: Database user password - - Service `maria_rds_warehouse`: Database user password - - Service `maria_warehouse`: Database user password - - Service `mysql_rds_warehouse`: Database user password - - Service `mysql_warehouse`: Database user password - - Service `panoply`: Database user password - - Service `periscope_warehouse`: Database user password - - Service `postgres_gcp_warehouse`: Database user password - - Service `postgres_rds_warehouse`: Database user password - - Service `postgres_warehouse`: Database user password - - Service `redshift`: Database user password. Required if authentication type is `PASSWORD`. - - Service `snowflake`: Database user password. The field should be specified if authentication type is `PASSWORD`. - - Service `sql_server_rds_warehouse`: Database user password - - Service `sql_server_warehouse`: Database user password -- `personal_access_token` (String, Sensitive) Field usage depends on `service` value: - - Service `databricks`: Personal access token -- `port` (Number) Field usage depends on `service` value: - - Service `aurora_postgres_warehouse`: Server port number - - Service `aurora_warehouse`: Server port number - - Service `azure_postgres_warehouse`: Server port number - - Service `azure_sql_data_warehouse`: Server port number - - Service `azure_sql_database`: Server port number - - Service `azure_sql_managed_db_warehouse`: Server port number - - Service `databricks`: Server port number - - Service `maria_rds_warehouse`: Server port number - - Service `maria_warehouse`: Server port number - - Service `mysql_rds_warehouse`: Server port number - - Service `mysql_warehouse`: Server port number - - Service `panoply`: Server port number - - Service `periscope_warehouse`: Server port number - - Service `postgres_gcp_warehouse`: Server port number - - Service `postgres_rds_warehouse`: Server port number - - Service `postgres_warehouse`: Server port number - - Service `redshift`: Server port number - - Service `snowflake`: Server port number - - Service `sql_server_rds_warehouse`: Server port number - - Service `sql_server_warehouse`: Server port number -- `prefix_path` (String) Field usage depends on `service` value: - - Service `adls`: path/to/data within the container - - Service `new_s3_datalake`: Prefix path of the bucket for which you have configured access policy. It is not required if access has been granted to entire Bucket in the access policy - - Service `onelake`: path/to/data within your lakehouse inside the Files directory -- `private_key` (String, Sensitive) Field usage depends on `service` value: - - Service `snowflake`: Private access key. The field should be specified if authentication type is `KEY_PAIR`. -- `project_id` (String) Field usage depends on `service` value: - - Service `big_query`: BigQuery project ID -- `region` (String) Field usage depends on `service` value: - - Service `new_s3_datalake`: Region of your AWS S3 bucket -- `registry_name` (String) -- `registry_sts_region` (String) -- `replication_factor` (Number) Field usage depends on `service` value: - - Service `confluent_cloud_wh`: Replication factor. 
-- `resource_id` (String) Field usage depends on `service` value: - - Service `aurora_postgres_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `aurora_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `azure_postgres_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `azure_sql_data_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `azure_sql_database`: Field to test Self serve Private Link - - Service `azure_sql_managed_db_warehouse`: Field to test Self serve Private Link - - Service `databricks`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `maria_rds_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `maria_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `mysql_rds_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `mysql_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `panoply`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `periscope_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `postgres_gcp_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `postgres_rds_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `postgres_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `redshift`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `snowflake`: This field is currently being introduced to test the Self-serve Private Link functionality - - Service `sql_server_rds_warehouse`: Field to test Self serve Private Link - - Service `sql_server_warehouse`: Field to test Self serve Private Link -- `role` (String) Field usage depends on `service` value: - - Service `snowflake`: If not specified, Fivetran will use the user's default role -- `role_arn` (String, Sensitive) Field usage depends on `service` value: - - Service `redshift`: Role ARN with Redshift permissions. Required if authentication type is `IAM`. -- `sasl_mechanism` (String) Field usage depends on `service` value: - - Service `confluent_cloud_wh`: Security protocol for Confluent Cloud interaction. -- `sasl_plain_key` (String, Sensitive) Field usage depends on `service` value: - - Service `confluent_cloud_wh`: Confluent Cloud SASL key. -- `sasl_plain_secret` (String, Sensitive) Field usage depends on `service` value: - - Service `confluent_cloud_wh`: Confluent Cloud SASL secret. -- `schema_compatibility` (String) -- `schema_registry` (String) Field usage depends on `service` value: - - Service `confluent_cloud_wh`: Schema Registry -- `schema_registry_api_key` (String, Sensitive) Field usage depends on `service` value: - - Service `confluent_cloud_wh`: Schema registry API key. 
-- `schema_registry_api_secret` (String, Sensitive) Field usage depends on `service` value: - - Service `confluent_cloud_wh`: Schema registry API secret. -- `schema_registry_url` (String) Field usage depends on `service` value: - - Service `confluent_cloud_wh`: Schema registry URL. -- `secret_key` (String, Sensitive) Field usage depends on `service` value: - - Service `big_query`: Private key of the customer service account. If specified, your service account will be used to process the data instead of the Fivetran-managed service account. - - Service `big_query_dts`: Private key of the customer service account. If specified, your service account will be used to process the data instead of the Fivetran-managed service account. - - Service `managed_big_query`: Private key of the customer service account. If specified, your service account will be used to process the data instead of the Fivetran-managed service account. -- `secret_value` (String, Sensitive) Field usage depends on `service` value: - - Service `adls`: Secret value for service principal - - Service `onelake`: Secret value for service principal -- `security_protocol` (String) Field usage depends on `service` value: - - Service `confluent_cloud_wh`: Security protocol for Confluent Cloud interaction. -- `server_host_name` (String) Field usage depends on `service` value: - - Service `databricks`: Server name -- `snapshot_retention_period` (String) Field usage depends on `service` value: - - Service `adls`: Snapshots older than the retention period are deleted every week. Default value: `ONE_WEEK`. - - Service `new_s3_datalake`: Snapshots older than the retention period are deleted every week. Default value: `ONE_WEEK`. - - Service `onelake`: Snapshots older than the retention period are deleted every week. Default value: `ONE_WEEK`. -- `snowflake_cloud` (String) -- `snowflake_region` (String) -- `storage_account_name` (String) Field usage depends on `service` value: - - Service `adls`: Storage account for Azure Data Lake Storage Gen2 name - - Service `onelake`: Storage account for Azure Data Lake Storage Gen2 name -- `table_format` (String) Field usage depends on `service` value: - - Service `new_s3_datalake`: (Immutable) The table format in which you want to sync your tables. Valid values are ICEBERG and DELTA_LAKE -- `tenant_id` (String) Field usage depends on `service` value: - - Service `adls`: Tenant id of service principal - - Service `onelake`: Tenant ID of service principal -- `tunnel_host` (String) Field usage depends on `service` value: - - Service `aurora_postgres_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `aurora_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_postgres_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_sql_data_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_sql_database`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_sql_managed_db_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `maria_rds_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `maria_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `mysql_rds_warehouse`: SSH server name. 
Must be populated if `connection_type` is set to `SshTunnel`. - - Service `mysql_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `panoply`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `periscope_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `postgres_gcp_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `postgres_rds_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `postgres_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `redshift`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `sql_server_rds_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `sql_server_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. -- `tunnel_port` (Number) Field usage depends on `service` value: - - Service `aurora_postgres_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `aurora_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_postgres_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_sql_data_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_sql_database`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_sql_managed_db_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `maria_rds_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `maria_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `mysql_rds_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `mysql_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `panoply`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `periscope_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `postgres_gcp_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `postgres_rds_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `postgres_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `redshift`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `sql_server_rds_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `sql_server_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. -- `tunnel_user` (String) Field usage depends on `service` value: - - Service `aurora_postgres_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `aurora_warehouse`: SSH user name. 
Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_postgres_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_sql_data_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_sql_database`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `azure_sql_managed_db_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `maria_rds_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `maria_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `mysql_rds_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `mysql_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `panoply`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `periscope_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `postgres_gcp_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `postgres_rds_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `postgres_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `redshift`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `sql_server_rds_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. - - Service `sql_server_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. 
-- `user` (String) Field usage depends on `service` value: - - Service `aurora_postgres_warehouse`: Database user name - - Service `aurora_warehouse`: Database user name - - Service `azure_postgres_warehouse`: Database user name - - Service `azure_sql_data_warehouse`: Database user name - - Service `azure_sql_database`: Database user name - - Service `azure_sql_managed_db_warehouse`: Database user name - - Service `maria_rds_warehouse`: Database user name - - Service `maria_warehouse`: Database user name - - Service `mysql_rds_warehouse`: Database user name - - Service `mysql_warehouse`: Database user name - - Service `panoply`: Database user name - - Service `periscope_warehouse`: Database user name - - Service `postgres_gcp_warehouse`: Database user name - - Service `postgres_rds_warehouse`: Database user name - - Service `postgres_warehouse`: Database user name - - Service `redshift`: Database user name - - Service `snowflake`: Database user name - - Service `sql_server_rds_warehouse`: Database user name - - Service `sql_server_warehouse`: Database user name -- `workspace_name` (String) Field usage depends on `service` value: - - Service `onelake`: OneLake workspace name - -Read-Only: - -- `external_id` (String) Field usage depends on `service` value: - - Service `aws_msk_wh`: Fivetran generated External ID - - Service `panoply`: Fivetran generated External ID - - Service `periscope_warehouse`: Fivetran generated External ID - - Service `redshift`: Fivetran generated External ID -- `public_key` (String) Field usage depends on `service` value: - - Service `aurora_postgres_warehouse`: Public Key - - Service `aurora_warehouse`: Public Key - - Service `azure_postgres_warehouse`: Public Key - - Service `azure_sql_data_warehouse`: Public Key - - Service `azure_sql_database`: Public Key - - Service `azure_sql_managed_db_warehouse`: Public Key - - Service `maria_rds_warehouse`: Public Key - - Service `maria_warehouse`: Public Key - - Service `mysql_rds_warehouse`: Public Key - - Service `mysql_warehouse`: Public Key - - Service `panoply`: Public Key - - Service `periscope_warehouse`: Public Key - - Service `postgres_gcp_warehouse`: Public Key - - Service `postgres_rds_warehouse`: Public Key - - Service `postgres_warehouse`: Public Key - - Service `redshift`: Public Key - - Service `sql_server_rds_warehouse`: Public Key - - Service `sql_server_warehouse`: Public Key - - - -### Nested Schema for `timeouts` - -Optional: - -- `create` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). -- `update` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). - -## Setup tests - -The `run_setup_tests` field doesn't have an upstream value; it only defines local resource behavior. This means that updating only the `run_setup_tests` value (from `false` to `true`, for example) won't cause any upstream actions. The value is simply saved in the Terraform state and then used on subsequent effective field updates. - -The default value is `false`, which means that no setup tests are performed during create/update. To perform setup tests, set the value to `true`. - -## Import - -1.
To import an existing `fivetran_destination` resource into your Terraform state, you need to get the **Destination Group ID** from the destination page in your Fivetran dashboard. -To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups). -2. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_destination" "my_imported_destination" { - -} -``` - -3. Run the `terraform import` command with the following parameters: - -``` -terraform import fivetran_destination.my_imported_destination {your Destination Group ID} -``` - -4. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_destination.my_imported_destination' -``` -5. Copy the values and paste them to your `.tf` configuration. - --> The `config` object in the state contains all properties defined in the schema. You need to remove properties from the `config` that are not related to destinations. See the [Fivetran REST API documentation](https://fivetran.com/docs/rest-api/destinations/config) for reference to find the properties you need to keep in the `config` section. \ No newline at end of file diff --git a/docs/resources/destination_certificates.md b/docs/resources/destination_certificates.md deleted file mode 100644 index 566d343e..00000000 --- a/docs/resources/destination_certificates.md +++ /dev/null @@ -1,46 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "fivetran_destination_certificates Resource - terraform-provider-fivetran" -subcategory: "" -description: |- - ---- - -# fivetran_destination_certificates (Resource) - - - - - - -## Schema - -### Required - -- `destination_id` (String) The unique identifier for the target destination within the Fivetran system. - -### Optional - -- `certificate` (Block Set) (see [below for nested schema](#nestedblock--certificate)) - -### Read-Only - -- `id` (String) The unique identifier for the resource. Equal to the target destination id. - - -### Nested Schema for `certificate` - -Required: - -- `encoded_cert` (String, Sensitive) Base64-encoded certificate. -- `hash` (String) Hash of the certificate. - -Read-Only: - -- `name` (String) Certificate name. -- `public_key` (String) The SSH public key. -- `sha1` (String) Certificate sha1. -- `sha256` (String) Certificate sha256. -- `type` (String) Type of the certificate. -- `validated_by` (String) The name of the user who validated the certificate. -- `validated_date` (String) The date when the certificate was approved. diff --git a/docs/resources/destination_fingerprints.md b/docs/resources/destination_fingerprints.md deleted file mode 100644 index cab7602e..00000000 --- a/docs/resources/destination_fingerprints.md +++ /dev/null @@ -1,79 +0,0 @@ ---- -page_title: "Resource: fivetran_destination_fingerprints" ---- - -# Resource: fivetran_destination_fingerprints - -This resource allows you to manage the list of approved SSH fingerprints for a particular destination. - -## Example Usage - -```hcl -resource "fivetran_destination_fingerprints" "my_destination_approved_fingerprints" { - destination_id = fivetran_destination.my_destination.id - fingerprint { - hash = "jhgfJfgrI6yy..." - public_key = "ssh-rsa CCCCB3NzaC1yc2ECCASFWFWDFRWT5WAS ... fivetran user key" - } - fingerprint { - hash = "eUtPirI6yytWe..." - public_key = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC6 ...
fivetran user key" - } -} -``` - - -## Schema - -### Required - -- `destination_id` (String) The unique identifier for the target destination within the Fivetran system. - -### Optional - -- `fingerprint` (Block Set) (see [below for nested schema](#nestedblock--fingerprint)) - -### Read-Only - -- `id` (String) The unique identifier for the resource. Equal to target destination id. - - -### Nested Schema for `fingerprint` - -Required: - -- `hash` (String) Hash of the fingerprint. -- `public_key` (String) The SSH public key. - -Read-Only: - -- `validated_by` (String) User name who validated the fingerprint. -- `validated_date` (String) The date when fingerprint was approved. - -## Import - -1. To import an existing `fivetran_destination_fingerprints` resource into your Terraform state, you need to get **Destination Group ID** on the destination page in your Fivetran dashboard. - -2. To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups). - -3. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_destination_fingerprints" "my_imported_destination_fingerprints" { - -} -``` - -4. Run the `terraform import` command: - -``` -terraform import fivetran_destination_fingerprints.my_imported_destination_fingerprints {your Destination Group ID} -``` - -5. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_destination_fingerprints.my_imported_destination_fingerprints' -``` - -6. Copy the values and paste them to your `.tf` configuration. \ No newline at end of file diff --git a/docs/resources/external_logging.md b/docs/resources/external_logging.md deleted file mode 100644 index 8aa0fefa..00000000 --- a/docs/resources/external_logging.md +++ /dev/null @@ -1,95 +0,0 @@ ---- -page_title: "Resource: fivetran_external_logging" ---- - -# Resource: fivetran_external_logging - -This resource allows you to create, update, and delete logging service. - -## Example Usage - -```hcl -resource "fivetran_external_logging" "extlog" { - group_id = fivetran_group.group.id - service = "azure_monitor_log" - enabled = "true" - run_setup_tests = "true" - - config { - workspace_id = "workspace_id" - primary_key = "PASSWORD" - } -} -``` - - -## Schema - -### Required - -- `group_id` (String) The unique identifier for the log service within the Fivetran system. -- `service` (String) The name for the log service type within the Fivetran system. We support the following log services: azure_monitor_log, cloudwatch, datadog_log, new_relic_log, splunkLog, stackdriver. - -### Optional - -- `config` (Block, Optional) (see [below for nested schema](#nestedblock--config)) -- `enabled` (Boolean) The boolean value specifying whether the log service is enabled. -- `run_setup_tests` (Boolean) Specifies whether the setup tests should be run automatically. The default value is TRUE. - -### Read-Only - -- `id` (String) The unique identifier for the log service within the Fivetran system. 
- - -### Nested Schema for `config` - -Optional: - -- `api_key` (String, Sensitive) API Key -- `channel` (String) Channel -- `enable_ssl` (Boolean) Enable SSL -- `external_id` (String) external_id -- `host` (String) Server name -- `hostname` (String) Server name -- `log_group_name` (String) Log Group Name -- `port` (Number) Port -- `primary_key` (String, Sensitive) Primary Key -- `project_id` (String) Project ID for Google Cloud Logging -- `region` (String) Region -- `role_arn` (String) Role ARN -- `sub_domain` (String) Sub Domain -- `token` (String, Sensitive) Token -- `workspace_id` (String) Workspace ID - -## Setup tests - -The `run_setup_tests` field doesn't have an upstream value; it only defines local resource behavior. This means that updating only the `run_setup_tests` value (from `false` to `true`, for example) won't cause any upstream actions. The value is simply saved in the Terraform state and then used on subsequent effective field updates. - -The default value is `false`, which means that no setup tests are performed during create/update. To perform setup tests, set the value to `true`. - -## Import - -1. To import an existing `fivetran_external_logging` resource into your Terraform state, you need to get the **External Logging Group ID** from the external logging page in your Fivetran dashboard. -To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups). -2. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_external_logging" "my_imported_external_logging" { - -} -``` - -3. Run the `terraform import` command with the following parameters: - -``` -terraform import fivetran_external_logging.my_imported_external_logging {your External Logging Group ID} -``` - -4. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_external_logging.my_imported_external_logging' -``` -5. Copy the values and paste them to your `.tf` configuration. - --> The `config` object in the state contains all properties defined in the schema. You need to remove properties from the `config` that are not related to the log service. See the [Fivetran REST API documentation](https://fivetran.com/docs/rest-api/log-service-management#logservicesetupconfigurations) for reference to find the properties you need to keep in the `config` section. \ No newline at end of file diff --git a/docs/resources/group.md b/docs/resources/group.md deleted file mode 100644 index f60c4230..00000000 --- a/docs/resources/group.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -page_title: "Resource: fivetran_group" ---- - -# Resource: fivetran_group - -This resource allows you to create, update, and delete groups. - -IMPORTANT: Groups and destinations are mapped 1:1 to each other. We do this mapping using the group's id value that we automatically generate when you create a group using our Terraform Provider, and the destination's group_id value that you specify when you create a destination using our Terraform Provider. This means that if you use our Terraform Provider to create a destination, you must create a group in your Fivetran account before you can create a destination in it. - -When you create a destination in your Fivetran dashboard, we automatically create a group and assign a value to its id and a destination with the same group_id value, which is unique in your Fivetran account.
The group's name corresponds to the Destination name you specify in your Fivetran dashboard when creating the destination in your Fivetran dashboard. - -## Example Usage - -```hcl -resource "fivetran_group" "group" { - name = "MyGroup" -} -``` - - -## Schema - -### Required - -- `name` (String) The name of the group within your account. - -### Read-Only - -- `created_at` (String) The timestamp of when the group was created in your account. -- `id` (String) The unique identifier for the group within the Fivetran system. -- `last_updated` (String) The timestamp of when the resource/datasource was updated last time. - -## Import - -1. To import an existing `fivetran_group` resource into your Terraform state, you need to get **Destination Group ID** on the destination page in your Fivetran dashboard. -To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups). -2. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_group" "my_imported_fivetran_group" { - -} -``` - -3. Run the `terraform import` command: - -``` -terraform import fivetran_group.my_imported_fivetran_group {your Destination Group ID} -``` - -4. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_group.my_imported_fivetran_group' -``` - -5. Copy the values and paste them to your `.tf` configuration. \ No newline at end of file diff --git a/docs/resources/group_users.md b/docs/resources/group_users.md deleted file mode 100644 index 3cd0ab79..00000000 --- a/docs/resources/group_users.md +++ /dev/null @@ -1,78 +0,0 @@ ---- -page_title: "Resource: fivetran_group_users" ---- - -# Resource: fivetran_group_users - -This resource allows you to create, update, and delete user memberships in groups. - -## Example Usage - -```hcl -resource "fivetran_group_users" "group_users" { - group_id = fivetran_group.group.id - - user { - email = "mail@example.com" - role = "Destination Analyst" - } - - user { - email = "another_mail@example.com" - role = "Destination Analyst" - } -} -``` - - -## Schema - -### Required - -- `group_id` (String) The unique identifier for the Group within the Fivetran system. - -### Optional - -- `user` (Block Set) (see [below for nested schema](#nestedblock--user)) - -### Read-Only - -- `id` (String) The unique identifier for the resource. -- `last_updated` (String) - - -### Nested Schema for `user` - -Required: - -- `email` (String) The email address that the user has associated with their user profile. -- `role` (String) The group role that you would like to assign this new user to. Supported group roles: ‘Destination Administrator‘, ‘Destination Reviewer‘, ‘Destination Analyst‘, ‘Connector Creator‘, or a custom destination role - -Read-Only: - -- `id` (String) The unique identifier for the user within the account. - --## Import - -1. To import an existing `fivetran_group_users` resource into your Terraform state, you need to get **Destination Group ID** on the destination page in your Fivetran dashboard. -To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups). -2. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_group_users" "my_imported_fivetran_group_users" { - -} -``` - -3. Run the `terraform import` command: - -``` -terraform import fivetran_group_users.my_imported_fivetran_group_users {your Destination Group ID} -``` - -4. 
Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_group_users.my_imported_fivetran_group_users' -``` -5. Copy the values and paste them to your `.tf` configuration. \ No newline at end of file diff --git a/docs/resources/hybrid_deployment_agent.md b/docs/resources/hybrid_deployment_agent.md deleted file mode 100644 index 2b4ea831..00000000 --- a/docs/resources/hybrid_deployment_agent.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -page_title: "Resource: fivetran_hybrid_deployment_agent" ---- - -# Resource: fivetran_hybrid_deployment_agent - -This resource allows you to create, update, and delete hybrid deployment agents. - -## Example Usage - -```hcl -resource "fivetran_hybrid_deployment_agent" "hybrid_deployment_agent" { - provider = fivetran-provider - - display_name = "display_name" - group_id = "group_id" - auth_type = "AUTO" -} -``` - - -## Schema - -### Required - -- `auth_type` (String) Type of authentication. Possible values: `AUTO`, `MANUAL`. -- `display_name` (String) The unique name for the hybrid deployment agent. -- `group_id` (String) The unique identifier for the Group within the Fivetran system. - -### Optional - -- `authentication_counter` (Number) Determines whether re-authentication needs to be performed. - -### Read-Only - -- `auth_json` (String) Base64-encoded content of the auth.json file. -- `config_json` (String) Base64-encoded content of the config.json file. -- `docker_compose_yaml` (String) Base64-encoded content of the compose file for the chosen containerization type. -- `id` (String) The unique identifier for the hybrid deployment agent within your account. -- `registered_at` (String) The timestamp of when the hybrid deployment agent was created in your account. -- `token` (String) Base64-encoded content of the token. \ No newline at end of file diff --git a/docs/resources/local_processing_agent.md b/docs/resources/local_processing_agent.md deleted file mode 100644 index 9205511b..00000000 --- a/docs/resources/local_processing_agent.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -page_title: "Resource: fivetran_local_processing_agent" ---- - -# Resource: fivetran_local_processing_agent - -NOTE: In connection with the general availability of the hybrid deployment functionality and in order to synchronize internal terminology, we have deprecated this resource. - -This resource allows you to create, update, and delete local processing agents. - - -## Example Usage - -```hcl -resource "fivetran_local_processing_agent" "test_lpa" { - provider = fivetran-provider - - display_name = "display_name" - group_id = "group_id" -} -``` - - -## Schema - -### Required - -- `display_name` (String) The unique name for the local processing agent. -- `group_id` (String) The unique identifier for the Group within the Fivetran system. - -### Optional - -- `authentication_counter` (Number) Determines whether re-authentication needs to be performed. - -### Read-Only - -- `auth_json` (String) Base64-encoded content of the auth.json file. -- `config_json` (String) Base64-encoded content of the config.json file. -- `docker_compose_yaml` (String) Base64-encoded content of the compose file for the chosen containerization type. -- `id` (String) The unique identifier for the local processing agent within your account. -- `registered_at` (String) The timestamp of when the local processing agent was created in your account.
-- `usage` (Attributes Set) (see [below for nested schema](#nestedatt--usage)) - - -### Nested Schema for `usage` - -Required: - -- `schema` (String) The connection schema name. -- `service` (String) The connection type. - -Read-Only: - -- `connection_id` (String) The unique identifier of the connection associated with the agent. \ No newline at end of file diff --git a/docs/resources/private_link.md b/docs/resources/private_link.md deleted file mode 100644 index 8e11cca7..00000000 --- a/docs/resources/private_link.md +++ /dev/null @@ -1,55 +0,0 @@ ---- -page_title: "Resource: fivetran_private_link" ---- - -# Resource: fivetran_private_link - -This resource allows you to create, update, and delete private links. - -## Example Usage - -```hcl -resource "fivetran_private_link" "test_pl" { - provider = fivetran-provider - - name = "name" - region = "region" - service = "service" - - config { - connection_service_name = "connection_service_name" - } -} -``` - - -## Schema - -### Required - -- `config_map` (Map of String) Configuration. - -#### Possible values --- `connection_service_name` (String): The name of your connection service. --- `account_url` (String): The URL of your account. --- `vpce_id` (String): The ID of your Virtual Private Cloud Endpoint. --- `aws_account_id` (String): The ID of your AWS account. --- `cluster_identifier` (String): The cluster identifier. --- `connection_service_id` (String): The ID of your connection service. --- `workspace_url` (String): The URL of your workspace. --- `pls_id` (String): The ID of your Azure Private Link service. --- `sub_resource_name` (String): The name of subresource. --- `private_dns_regions` (String): Private DNS Regions. --- `private_connection_service_id` (String): The ID of your connection service. -- `name` (String) The private link name within the account. The name must start with a letter or underscore and can only contain letters, numbers, or underscores. Maximum size of name is 23 characters. -- `region` (String) Data processing location. This is where Fivetran will operate and run computation on data. -- `service` (String) Service type. - -### Read-Only - -- `cloud_provider` (String) The cloud provider name. -- `created_at` (String) The date and time the membership was created. -- `created_by` (String) The unique identifier for the User within the Fivetran system. -- `id` (String) The unique identifier for the private link within the Fivetran system. -- `state` (String) The state of the private link. -- `state_summary` (String) The state of the private link. diff --git a/docs/resources/proxy_agent.md b/docs/resources/proxy_agent.md deleted file mode 100644 index 40e70b15..00000000 --- a/docs/resources/proxy_agent.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -page_title: "Resource: fivetran_proxy_agent" ---- - -# Resource: fivetran_proxy_agent - -This resource allows you to create, update, and delete proxy agent. - -## Example Usage - -```hcl -resource "fivetran_proxy_agent" "test_proxy_agent" { - provider = fivetran-provider - - display_name = "display_name" - group_region = "group_region" -} -``` - - -## Schema - -### Required - -- `display_name` (String) Proxy agent name. -- `group_region` (String) Data processing location. This is where Fivetran will operate and run computation on data. - -### Read-Only - -- `created_by` (String) The actor who created the proxy agent. -- `id` (String) The unique identifier for the proxy within your account. -- `proxy_server_uri` (String) The proxy server URI. 
-- `registred_at` (String) The timestamp of the time the proxy agent was created in your account. -- `salt` (String) The salt. -- `token` (String) The auth token. diff --git a/docs/resources/team.md b/docs/resources/team.md deleted file mode 100644 index 61b5aefb..00000000 --- a/docs/resources/team.md +++ /dev/null @@ -1,61 +0,0 @@ ---- -page_title: "Resource: fivetran_team" ---- - -# Resource: fivetran_team - -This resource allows you to create, update, and delete teams. - -## Example Usage - -```hcl -resource "fivetran_team" "test_team" { - provider = fivetran-provider - - name = "test_team" - description = "test_description" - role = "Account Reviewer" -} -``` - - -## Schema - -### Required - -- `name` (String) The name of the team within your account. -- `role` (String) The account role of the team. - -### Optional - -- `description` (String) The description of the team within your account. - -### Read-Only - -- `id` (String) The unique identifier for the team within your account. - -## Import - -1. To import an existing `fivetran_team` resource into your Terraform state, you need to get `team_id`. -You can retrieve all teams using the [fivetran_teams data source](/docs/data-sources/teams). - -2. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_team" "my_imported_fivetran_team" { - -} -``` - -3. Run the `terraform import` command: - -``` -terraform import fivetran_team.my_imported_fivetran_team {team_id} -``` - -4. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_team.my_imported_fivetran_team' -``` -5. Copy the values and paste them to your `.tf` configuration. \ No newline at end of file diff --git a/docs/resources/team_connector_membership.md b/docs/resources/team_connector_membership.md deleted file mode 100644 index 144e7c61..00000000 --- a/docs/resources/team_connector_membership.md +++ /dev/null @@ -1,82 +0,0 @@ ---- -page_title: "Resource: fivetran_team_connector_membership" ---- - -# Resource: fivetran_team_connector_membership - -This resource allows you to create, update, and delete connector membership for teams - -## Example Usage - -```hcl -resource "fivetran_team_connector_membership" "test_team_connector_membership" { - provider = fivetran-provider - - team_id = "test_team" - - connector { - connector_id = "test_connector" - role = "Connector Administrator" - created_at = "2020-05-25T15:26:47.306509Z" - } - - connector { - connector_id = "test_connector" - role = "Connector Administrator" - created_at = "2020-05-25T15:26:47.306509Z" - } -} -``` - - -## Schema - -### Required - -- `team_id` (String) The unique identifier for the team within your account. - -### Optional - -- `connector` (Block Set) (see [below for nested schema](#nestedblock--connector)) - -### Read-Only - -- `id` (String) The unique identifier for resource. - - -### Nested Schema for `connector` - -Required: - -- `connector_id` (String) The connector unique identifier -- `role` (String) The team's role that links the team and the connector - -Read-Only: - -- `created_at` (String) The date and time the membership was created - -## Import - -1. To import an existing `fivetran_team_connector_membership` resource into your Terraform state, you need to get `team_id` and `connector_id` -You can retrieve all teams using the [fivetran_teams data source](/docs/data-sources/teams). - -2. 
Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_team_connector_membership" "my_imported_fivetran_team_connector_membership" { - -} -``` - -3. Run the `terraform import` command: - -``` -terraform import fivetran_team_connector_membership.my_imported_fivetran_team_connector_membership {team_id} -``` - -4. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_team_connector_membership.my_imported_fivetran_team_connector_membership' -``` -5. Copy the values and paste them to your `.tf` configuration. diff --git a/docs/resources/team_group_membership.md b/docs/resources/team_group_membership.md deleted file mode 100644 index 3fef8578..00000000 --- a/docs/resources/team_group_membership.md +++ /dev/null @@ -1,82 +0,0 @@ ---- -page_title: "Resource: fivetran_team_group_membership" ---- - -# Resource: fivetran_team_group_membership - -This resource allows you to create, update, and delete group memberships for teams. - -## Example Usage - -```hcl -resource "fivetran_team_group_membership" "test_team_group_membership" { - provider = fivetran-provider - - team_id = "test_team" - - group { - group_id = "test_group" - role = "Destination Administrator" - } - - group { - group_id = "test_group_2" - role = "Destination Administrator" - } -} -``` - - -## Schema - -### Required - -- `team_id` (String) The unique identifier for the team within your account. - -### Optional - -- `group` (Block Set) (see [below for nested schema](#nestedblock--group)) - -### Read-Only - -- `id` (String) The unique identifier for the resource. - - -### Nested Schema for `group` - -Required: - -- `group_id` (String) The group unique identifier -- `role` (String) The team's role that links the team and the group - -Read-Only: - -- `created_at` (String) The date and time the membership was created - -## Import - -1. To import an existing `fivetran_team_group_membership` resource into your Terraform state, you need to get `team_id` and `group_id`. -You can retrieve all teams using the [fivetran_teams data source](/docs/data-sources/teams). - -2. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_team_group_membership" "my_imported_fivetran_team_group_membership" { - -} -``` - -3. Run the `terraform import` command: - -``` -terraform import fivetran_team_group_membership.my_imported_fivetran_team_group_membership {team_id} -``` - -4. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_team_group_membership.my_imported_fivetran_team_group_membership' -``` -5. Copy the values and paste them to your `.tf` configuration; a filled-in sketch is shown below.
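After step 5, the previously empty resource block carries the imported values. A minimal sketch of the result, assuming hypothetical `team_id` and `group_id` placeholders that you would replace with the values reported by `terraform state show`:

```hcl
resource "fivetran_team_group_membership" "my_imported_fivetran_team_group_membership" {
  # Hypothetical values; copy the real ones from `terraform state show`.
  team_id = "my_team_id"

  group {
    group_id = "my_group_id"
    role     = "Destination Administrator"
  }
}
```

Read-only attributes such as `created_at` stay out of the configuration; Terraform tracks them in state only.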
diff --git a/docs/resources/team_user_membership.md b/docs/resources/team_user_membership.md deleted file mode 100644 index 15a1b814..00000000 --- a/docs/resources/team_user_membership.md +++ /dev/null @@ -1,76 +0,0 @@ ---- -page_title: "Resource: fivetran_team_user_membership" ---- - -# Resource: fivetran_team_user_membership - -This resource allows you to create, update, and delete user membership for teams - -## Example Usage - -```hcl -resource "fivetran_team_user_membership" "test_team_user_membership" { - provider = fivetran-provider - - team_id = "test_team" - - user { - user_id = "test_user" - role = "Connector Administrator" - } - - user { - user_id = "test_user" - role = "Connector Administrator" - } -} -``` - - -## Schema - -### Required - -- `team_id` (String) The unique identifier for the team within your account. - -### Optional - -- `user` (Block Set) (see [below for nested schema](#nestedblock--user)) - -### Read-Only - -- `id` (String) The unique identifier for resource. - - -### Nested Schema for `user` - -Required: - -- `role` (String) The team's role that links the team and the user -- `user_id` (String) The user unique identifier - -## Import - -1. To import an existing `fivetran_team_user_membership` resource into your Terraform state, you need to get `team_id` and `user_id` -You can retrieve all teams using the [fivetran_teams data source](/docs/data-sources/teams). - -2. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_team_user_membership" "my_imported_fivetran_team_user_membership" { - -} -``` - -3. Run the `terraform import` command: - -``` -terraform import fivetran_team_user_membership.my_imported_fivetran_team_user_membership {team_id} -``` - -4. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_team_user_membership.my_imported_fivetran_team_user_membership' -``` -5. Copy the values and paste them to your `.tf` configuration. diff --git a/docs/resources/user.md b/docs/resources/user.md deleted file mode 100644 index e3b07b89..00000000 --- a/docs/resources/user.md +++ /dev/null @@ -1,67 +0,0 @@ ---- -page_title: "Resource: fivetran_user" ---- - -# Resource: fivetran_user - --This resource allows you to create, update, and delete users. - -## Example Usage - -```hcl -resource "fivetran_user" "user" { - email = "user@email.address.com" - given_name = "John" - family_name = "Doe" - phone = "+353 00 0000 0000" -} -``` - - -## Schema - -### Required - -- `email` (String) The email address that the user has associated with their user profile. -- `family_name` (String) The last name of the user. -- `given_name` (String) The first name of the user. - -### Optional - -- `invited` (Boolean) The field indicates whether the user has been invited to your account. -- `phone` (String) The phone number of the user. -- `picture` (String) The user's avatar as a URL link (for example, 'http://mycompany.com/avatars/john_white.png') or base64 data URI (for example, 'data:image/png;base64,aHR0cDovL215Y29tcGFueS5jb20vYXZhdGFycy9qb2huX3doaXRlLnBuZw==') -- `role` (String) The role that you would like to assign to the user. -- `verified` (Boolean) The field indicates whether the user has verified their email address in the account creation process. - -### Read-Only - -- `created_at` (String) The timestamp that the user created their Fivetran account. -- `id` (String) The unique identifier for the user within the Fivetran system. 
-- `logged_in_at` (String) The last time that the user has logged into their Fivetran account. - -## Import - -1. To import an existing `fivetran_user` resource into your Terraform state, you need to get `user_id`. -You can retrieve all users using the [fivetran_users data source](/docs/data-sources/users). - -2. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_user" "my_imported_fivetran_user" { - -} -``` - -3. Run the `terraform import` command: - -``` -terraform import fivetran_user.my_imported_fivetran_user {user_id} -``` - -4. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_user.my_imported_fivetran_user' -``` -5. Copy the values and paste them to your `.tf` configuration. \ No newline at end of file diff --git a/docs/resources/user_connector_membership.md b/docs/resources/user_connector_membership.md deleted file mode 100644 index 20fb8d7e..00000000 --- a/docs/resources/user_connector_membership.md +++ /dev/null @@ -1,78 +0,0 @@ ---- -page_title: "Resource: fivetran_user_connector_membership" ---- - -# Resource: fivetran_user_connector_membership - -This resource allows you to create, update, and delete connector membership for user - -## Example Usage - -```hcl -resource "fivetran_user_connector_membership" "test_user_connector_membership" { - provider = fivetran-provider - - user_id = "test_user" - - connector { - connector_id = "test_connector" - role = "Connector Administrator" - created_at = "2020-05-25T15:26:47.306509Z" - } - - connector { - connector_id = "test_connector" - role = "Connector Administrator" - created_at = "2020-05-25T15:26:47.306509Z" - } -} -``` - - -## Schema - -### Required - -- `user_id` (String) The unique identifier for the user within your account. - -### Optional - -- `connector` (Block Set) (see [below for nested schema](#nestedblock--connector)) - - -### Nested Schema for `connector` - -Required: - -- `connector_id` (String) The connector unique identifier -- `role` (String) The user's role that links the user and the connector - -Read-Only: - -- `created_at` (String) The date and time the membership was created - -## Import - -1. To import an existing `fivetran_user_connector_membership` resource into your Terraform state, you need to get `user_id` and `connector_id` -You can retrieve all users using the [fivetran_users data source](/docs/data-sources/users). - -2. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_user_connector_membership" "my_imported_fivetran_user_connector_membership" { - -} -``` - -3. Run the `terraform import` command: - -``` -terraform import fivetran_user_connector_membership.my_imported_fivetran_user_connector_membership {user_id} -``` - -4. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_user_connector_membership.my_imported_fivetran_user_connector_membership' -``` -5. Copy the values and paste them to your `.tf` configuration. 
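The import steps above require a `user_id`; the `fivetran_users` data source referenced in step 1 can supply it. A sketch of the lookup, assuming the data source exposes a `users` collection whose elements carry `id` and `email` attributes (verify the exact shape against the data source documentation):

```hcl
data "fivetran_users" "all" {
}

# Hypothetical lookup: resolve the id of the user to import by email address.
output "user_id_for_import" {
  value = one([for u in data.fivetran_users.all.users : u.id if u.email == "mail@example.com"])
}
```

The resulting id is the value to substitute for `{user_id}` in the `terraform import` command above.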
diff --git a/docs/resources/user_group_membership.md b/docs/resources/user_group_membership.md deleted file mode 100644 index 717a45f5..00000000 --- a/docs/resources/user_group_membership.md +++ /dev/null @@ -1,78 +0,0 @@ ---- -page_title: "Resource: fivetran_user_group_membership" ---- - -# Resource: fivetran_user_group_membership - -This resource allows you to create, update, and delete group memberships for a user. - -## Example Usage - -```hcl -resource "fivetran_user_group_membership" "test_user_group_membership" { - provider = fivetran-provider - - user_id = "test_user" - - group { - group_id = "test_group" - role = "Destination Administrator" - } - - group { - group_id = "test_group_2" - role = "Destination Administrator" - } -} -``` - - -## Schema - -### Required - -- `user_id` (String) The unique identifier for the user within your account. - -### Optional - -- `group` (Block Set) (see [below for nested schema](#nestedblock--group)) - - -### Nested Schema for `group` - -Required: - -- `group_id` (String) The group unique identifier -- `role` (String) The user's role that links the user and the group - -Read-Only: - -- `created_at` (String) The date and time the membership was created - -## Import - -1. To import an existing `fivetran_user_group_membership` resource into your Terraform state, you need to get `user_id` and `group_id`. -You can retrieve all users using the [fivetran_users data source](/docs/data-sources/users). - -2. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_user_group_membership" "my_imported_fivetran_user_group_membership" { - -} -``` - -3. Run the `terraform import` command: - -``` -terraform import fivetran_user_group_membership.my_imported_fivetran_user_group_membership {user_id} -``` - -4. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_user_group_membership.my_imported_fivetran_user_group_membership' -``` -5. Copy the values and paste them to your `.tf` configuration. diff --git a/docs/resources/webhook.md b/docs/resources/webhook.md deleted file mode 100644 index 760c4271..00000000 --- a/docs/resources/webhook.md +++ /dev/null @@ -1,69 +0,0 @@ ---- -page_title: "Resource: fivetran_webhook" ---- - -# Resource: fivetran_webhook - -This resource allows you to create, update, and delete webhooks. - -## Example Usage - -```hcl -resource "fivetran_webhook" "test_webhook" { - type = "group" - group_id = "group_id" - url = "https://your-host.your-domain/webhook" - secret = "password" - active = false - events = ["sync_start", "sync_end"] -} -``` - - -## Schema - -### Required - -- `active` (Boolean) Boolean, if set to true, webhooks are immediately sent in response to events -- `events` (Set of String) The array of event types -- `secret` (String, Sensitive) The secret string used for payload signing and masked in the response. -- `type` (String) The webhook type (group, account) -- `url` (String) Your webhook URL endpoint for your application - -### Optional - -- `group_id` (String) The group ID -- `run_tests` (Boolean) Specifies whether the setup tests should be run - -### Read-Only - -- `created_at` (String) The webhook creation timestamp -- `created_by` (String) The ID of the user who created the webhook. -- `id` (String) The webhook ID - -## Import - -1. To import an existing `fivetran_webhook` resource into your Terraform state, you need to get `webhook_id`.
-You can retrieve all webhooks using the [fivetran_webhooks data source](/docs/data-sources/webhooks). - -2. Define an empty resource in your `.tf` configuration: - -```hcl -resource "fivetran_webhook" "my_imported_fivetran_webhook" { - -} -``` - -3. Run the `terraform import` command: - -``` -terraform import fivetran_webhook.my_imported_fivetran_webhook {webhook_id} -``` - -4. Use the `terraform state show` command to get the values from the state: - -``` -terraform state show 'fivetran_webhook.my_imported_fivetran_webhook' -``` -5. Copy the values and paste them to your `.tf` configuration. \ No newline at end of file diff --git a/fivetran/framework/core/model/quickstart_package.go b/fivetran/framework/core/model/quickstart_package.go new file mode 100644 index 00000000..64c53ef3 --- /dev/null +++ b/fivetran/framework/core/model/quickstart_package.go @@ -0,0 +1,35 @@ +package model + +import ( + "context" + + "github.com/fivetran/go-fivetran/transformations" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +type QuickstartPackage struct { + Id types.String `tfsdk:"id"` + Name types.String `tfsdk:"name"` + Version types.String `tfsdk:"version"` + ConnectorTypes types.Set `tfsdk:"connector_types"` + OutputModelNames types.Set `tfsdk:"output_model_names"` +} + +func (d *QuickstartPackage) ReadFromResponse(ctx context.Context, resp transformations.QuickstartPackageResponse) { + d.Id = types.StringValue(resp.Data.Id) + d.Name = types.StringValue(resp.Data.Name) + d.Version = types.StringValue(resp.Data.Version) + + connectors := []attr.Value{} + for _, connector := range resp.Data.ConnectorTypes { + connectors = append(connectors, types.StringValue(connector)) + } + d.ConnectorTypes, _ = types.SetValue(types.StringType, connectors) + + models := []attr.Value{} + for _, connector := range resp.Data.OutputModelNames { + models = append(models, types.StringValue(connector)) + } + d.OutputModelNames, _ = types.SetValue(types.StringType, models) +} \ No newline at end of file diff --git a/fivetran/framework/core/model/quickstart_packages.go b/fivetran/framework/core/model/quickstart_packages.go new file mode 100644 index 00000000..e2b6de6f --- /dev/null +++ b/fivetran/framework/core/model/quickstart_packages.go @@ -0,0 +1,62 @@ +package model + +import ( + "context" + + "github.com/fivetran/go-fivetran/transformations" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +type QuickstartPackages struct { + Packages types.List `tfsdk:"packages"` +} + +func (d *QuickstartPackages) ReadFromResponse(ctx context.Context, resp transformations.QuickstartPackagesListResponse) { + elemTypeAttrs := map[string]attr.Type{ + "id": types.StringType, + "name": types.StringType, + "version": types.StringType, + "connector_types": types.SetType{ElemType: types.StringType}, + "output_model_names": types.SetType{ElemType: types.StringType}, + } + + if resp.Data.Items == nil { + d.Packages = types.ListNull(types.ObjectType{AttrTypes: elemTypeAttrs}) + } else { + items := []attr.Value{} + for _, v := range resp.Data.Items { + item := map[string]attr.Value{} + item["id"] = types.StringValue(v.Id) + item["name"] = types.StringValue(v.Name) + item["version"] = types.StringValue(v.Version) + + connectors := []attr.Value{} + for _, el := range v.ConnectorTypes { + connectors = append(connectors, types.StringValue(el)) + } + + if len(connectors) > 0 { + item["connector_types"] = 
types.SetValueMust(types.StringType, connectors) + } else { + item["connector_types"] = types.SetNull(types.StringType) + } + + models := []attr.Value{} + for _, el := range v.OutputModelNames { + models = append(models, types.StringValue(el)) + } + + if len(models) > 0 { + item["output_model_names"] = types.SetValueMust(types.StringType, models) + } else { + item["output_model_names"] = types.SetNull(types.StringType) + } + + objectValue, _ := types.ObjectValue(elemTypeAttrs, item) + items = append(items, objectValue) + } + + d.Packages, _ = types.ListValue(types.ObjectType{AttrTypes: elemTypeAttrs}, items) + } +} diff --git a/fivetran/framework/core/model/transformation_project.go b/fivetran/framework/core/model/transformation_project.go index 5ced1e05..46dc6c93 100644 --- a/fivetran/framework/core/model/transformation_project.go +++ b/fivetran/framework/core/model/transformation_project.go @@ -8,7 +8,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" ) -type TransformationProject struct { +type TransformationResourceProject struct { Id types.String `tfsdk:"id"` GroupId types.String `tfsdk:"group_id"` Type types.String `tfsdk:"type"` @@ -20,7 +20,18 @@ type TransformationProject struct { ProjectConfig types.Object `tfsdk:"project_config"` } -func (d *TransformationProject) ReadFromResponse(ctx context.Context, resp transformations.TransformationProjectResponse) { +type TransformationDatasourceProject struct { + Id types.String `tfsdk:"id"` + GroupId types.String `tfsdk:"group_id"` + Type types.String `tfsdk:"type"` + Status types.String `tfsdk:"status"` + CreatedAt types.String `tfsdk:"created_at"` + CreatedById types.String `tfsdk:"created_by_id"` + Errors types.Set `tfsdk:"errors"` + ProjectConfig types.Object `tfsdk:"project_config"` +} + +func (d *TransformationResourceProject) ReadFromResponse(ctx context.Context, resp transformations.TransformationProjectResponse) { d.Id = types.StringValue(resp.Data.Id) d.GroupId = types.StringValue(resp.Data.GroupId) d.Type = types.StringValue(resp.Data.ProjectType) @@ -71,6 +82,59 @@ func (d *TransformationProject) ReadFromResponse(ctx context.Context, resp trans projectConfigItems["environment_vars"] = types.SetNull(types.StringType) } + d.ProjectConfig, _ = types.ObjectValue(projectConfigTypes, projectConfigItems) +} + +func (d *TransformationDatasourceProject) ReadFromResponse(ctx context.Context, resp transformations.TransformationProjectResponse) { + d.Id = types.StringValue(resp.Data.Id) + d.GroupId = types.StringValue(resp.Data.GroupId) + d.Type = types.StringValue(resp.Data.ProjectType) + d.CreatedAt = types.StringValue(resp.Data.CreatedAt) + d.CreatedById = types.StringValue(resp.Data.CreatedById) + d.Status = types.StringValue(resp.Data.Status) + + errors := []attr.Value{} + for _, el := range resp.Data.Errors { + errors = append(errors, types.StringValue(el)) + } + if len(errors) > 0 { + d.Errors = types.SetValueMust(types.StringType, errors) + } else { + if d.Errors.IsUnknown() { + d.Errors = types.SetNull(types.StringType) + } + } + projectConfigTypes := map[string]attr.Type{ + "dbt_version": types.StringType, + "default_schema": types.StringType, + "git_remote_url": types.StringType, + "folder_path": types.StringType, + "git_branch": types.StringType, + "target_name": types.StringType, + "environment_vars": types.SetType{ElemType: types.StringType}, + "public_key": types.StringType, + "threads": types.Int64Type, + } + projectConfigItems := map[string]attr.Value{} + projectConfigItems["dbt_version"] = 
types.StringValue(resp.Data.ProjectConfig.DbtVersion) + projectConfigItems["default_schema"] = types.StringValue(resp.Data.ProjectConfig.DefaultSchema) + projectConfigItems["git_remote_url"] = types.StringValue(resp.Data.ProjectConfig.GitRemoteUrl) + projectConfigItems["folder_path"] = types.StringValue(resp.Data.ProjectConfig.FolderPath) + projectConfigItems["git_branch"] = types.StringValue(resp.Data.ProjectConfig.GitBranch) + projectConfigItems["target_name"] = types.StringValue(resp.Data.ProjectConfig.TargetName) + projectConfigItems["public_key"] = types.StringValue(resp.Data.ProjectConfig.PublicKey) + projectConfigItems["threads"] = types.Int64Value(int64(resp.Data.ProjectConfig.Threads)) + + envVars := []attr.Value{} + for _, el := range resp.Data.ProjectConfig.EnvironmentVars { + envVars = append(envVars, types.StringValue(el)) + } + if len(envVars) > 0 { + projectConfigItems["environment_vars"] = types.SetValueMust(types.StringType, envVars) + } else { + projectConfigItems["environment_vars"] = types.SetNull(types.StringType) + } + d.ProjectConfig, _ = types.ObjectValue(projectConfigTypes, projectConfigItems) } \ No newline at end of file diff --git a/fivetran/framework/core/model/transformation_projects.go b/fivetran/framework/core/model/transformation_projects.go new file mode 100644 index 00000000..f36a7e9c --- /dev/null +++ b/fivetran/framework/core/model/transformation_projects.go @@ -0,0 +1,40 @@ +package model + +import ( + "context" + + "github.com/fivetran/go-fivetran/transformations" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +type TransformationProjects struct { + Projects types.List `tfsdk:"projects"` +} + +func (d *TransformationProjects) ReadFromResponse(ctx context.Context, resp transformations.TransformationProjectsListResponse) { + elemTypeAttrs := map[string]attr.Type{ + "id": types.StringType, + "type": types.StringType, + "group_id": types.StringType, + "created_at": types.StringType, + "created_by_id": types.StringType, + } + + if resp.Data.Items == nil { + d.Projects = types.ListNull(types.ObjectType{AttrTypes: elemTypeAttrs}) + } else { + items := []attr.Value{} + for _, v := range resp.Data.Items { + item := map[string]attr.Value{} + item["id"] = types.StringValue(v.Id) + item["type"] = types.StringValue(v.ProjectType) + item["group_id"] = types.StringValue(v.GroupId) + item["created_at"] = types.StringValue(v.CreatedAt) + item["created_by_id"] = types.StringValue(v.CreatedById) + objectValue, _ := types.ObjectValue(elemTypeAttrs, item) + items = append(items, objectValue) + } + d.Projects, _ = types.ListValue(types.ObjectType{AttrTypes: elemTypeAttrs}, items) + } +} \ No newline at end of file diff --git a/fivetran/framework/core/schema/quickstart_packages.go b/fivetran/framework/core/schema/quickstart_packages.go new file mode 100644 index 00000000..f465296e --- /dev/null +++ b/fivetran/framework/core/schema/quickstart_packages.go @@ -0,0 +1,70 @@ +package schema + +import ( + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + datasourceSchema "github.com/hashicorp/terraform-plugin-framework/datasource/schema" +) + +func QuickstartPackagesDatasource() datasourceSchema.Schema { + return datasourceSchema.Schema { + Blocks: map[string]datasourceSchema.Block{ + "packages": datasourceSchema.ListNestedBlock{ + NestedObject: datasourceSchema.NestedBlockObject{ + Attributes: map[string]datasourceSchema.Attribute{ + "id": datasourceSchema.StringAttribute{ + Computed: 
true, + Description: "The unique identifier for the Quickstart transformation package definition within the Fivetran system", + }, + "name": datasourceSchema.StringAttribute{ + Computed: true, + Description: "The Quickstart transformation package name", + }, + "version": datasourceSchema.StringAttribute{ + Computed: true, + Description: "The Quickstart package definition version", + }, + "connector_types": datasourceSchema.SetAttribute{ + Computed: true, + Description: "The set of connector types", + ElementType: basetypes.StringType{}, + }, + "output_model_names": datasourceSchema.SetAttribute{ + Computed: true, + Description: "The list of transformation output models", + ElementType: basetypes.StringType{}, + }, + }, + }, + }, + }, + } +} + +func QuickstartPackageDatasource() datasourceSchema.Schema { + return datasourceSchema.Schema { + Attributes: map[string]datasourceSchema.Attribute{ + "id": datasourceSchema.StringAttribute{ + Required: true, + Description: "The unique identifier for the Quickstart transformation package definition within the Fivetran system", + }, + "name": datasourceSchema.StringAttribute{ + Computed: true, + Description: "The Quickstart transformation package name", + }, + "version": datasourceSchema.StringAttribute{ + Computed: true, + Description: "The Quickstart package definition version", + }, + "connector_types": datasourceSchema.SetAttribute{ + Computed: true, + Description: "The set of connector types", + ElementType: basetypes.StringType{}, + }, + "output_model_names": datasourceSchema.SetAttribute{ + Computed: true, + Description: "The list of transformation output models", + ElementType: basetypes.StringType{}, + }, + }, + } +} \ No newline at end of file diff --git a/fivetran/framework/core/schema/transformation_project.go b/fivetran/framework/core/schema/transformation_project.go index cf8704e4..28015ef0 100644 --- a/fivetran/framework/core/schema/transformation_project.go +++ b/fivetran/framework/core/schema/transformation_project.go @@ -12,10 +12,10 @@ func TransformationProjectResource(ctx context.Context) resourceSchema.Schema { return resourceSchema.Schema{ Attributes: transformationProjectSchema().GetResourceSchema(), Blocks: map[string]resourceSchema.Block{ - "project_config": resourceSchema.SingleNestedBlock{ - Attributes: transformationProjectConfigSchema().GetResourceSchema(), - }, - }, + "project_config": resourceSchema.SingleNestedBlock{ + Attributes: transformationProjectConfigSchema().GetResourceSchema(), + }, + }, } } @@ -23,16 +23,44 @@ func TransformationProjectDatasource() datasourceSchema.Schema { return datasourceSchema.Schema{ Attributes: transformationProjectSchema().GetDatasourceSchema(), Blocks: map[string]datasourceSchema.Block{ - "project_config": datasourceSchema.SingleNestedBlock{ - Attributes: dbtProjectConfigSchema().GetDatasourceSchema(), - }, + "project_config": datasourceSchema.SingleNestedBlock{ + Attributes: transformationProjectConfigSchema().GetDatasourceSchema(), + }, }, } } func TransformationProjectListDatasource() datasourceSchema.Schema { return datasourceSchema.Schema{ - Attributes: transformationProjectSchema().GetDatasourceSchema(), + Attributes: map[string]datasourceSchema.Attribute{ + "projects": datasourceSchema.ListNestedAttribute{ + Computed: true, + NestedObject: datasourceSchema.NestedAttributeObject{ + Attributes: map[string]datasourceSchema.Attribute{ + "id": datasourceSchema.StringAttribute{ + Computed: true, + Description: "The unique identifier for the transformation project within the Fivetran 
system.", + }, + "group_id": datasourceSchema.StringAttribute{ + Computed: true, + Description: "The name of the group within your account related to the project.", + }, + "created_at": datasourceSchema.StringAttribute{ + Computed: true, + Description: "The timestamp of when the project was created in your account.", + }, + "created_by_id": datasourceSchema.StringAttribute{ + Computed: true, + Description: "The unique identifier for the User within the Fivetran system who created the transformation Project.", + }, + "type": datasourceSchema.StringAttribute{ + Computed: true, + Description: "Transformation project type.", + }, + }, + }, + }, + }, } } @@ -42,7 +70,7 @@ func transformationProjectSchema() core.Schema { "id": { IsId: true, ValueType: core.String, - Description: "The unique identifier for the dbt Project within the Fivetran system.", + Description: "The unique identifier for the transformation Project within the Fivetran system.", }, "group_id": { Required: true, @@ -59,13 +87,12 @@ func transformationProjectSchema() core.Schema { "status": { ValueType: core.String, Readonly: true, - ResourceOnly:true, - Description: "Status of dbt Project (NOT_READY, READY, ERROR).", + Description: "Status of transformation Project (NOT_READY, READY, ERROR).", }, "created_at": { ValueType: core.String, Readonly: true, - Description: "The timestamp of the dbt Project creation.", + Description: "The timestamp of the transformation Project creation.", }, "created_by_id": { ValueType: core.String, @@ -92,7 +119,7 @@ func transformationProjectConfigSchema() core.Schema { "dbt_version": { ValueType: core.String, ForceNew: true, - Description: "The version of dbt that should run the project", + Description: "The version of transformation that should run the project", }, "default_schema": { ValueType: core.String, @@ -102,11 +129,11 @@ func transformationProjectConfigSchema() core.Schema { "git_remote_url": { ValueType: core.String, ForceNew: true, - Description: "Git remote URL with your dbt project", + Description: "Git remote URL with your transformation project", }, "folder_path": { ValueType: core.String, - Description: "Folder in Git repo with your dbt project", + Description: "Folder in Git repo with your transformation project", }, "git_branch": { ValueType: core.String, @@ -114,7 +141,7 @@ func transformationProjectConfigSchema() core.Schema { }, "threads": { ValueType: core.Integer, - Description: "The number of threads dbt will use (from 1 to 32). Make sure this value is compatible with your destination type. For example, Snowflake supports only 8 concurrent queries on an X-Small warehouse.", + Description: "The number of threads transformation will use (from 1 to 32). Make sure this value is compatible with your destination type. 
For example, Snowflake supports only 8 concurrent queries on an X-Small warehouse.", }, "target_name": { ValueType: core.String, diff --git a/fivetran/framework/datasources/quickstart_package.go b/fivetran/framework/datasources/quickstart_package.go new file mode 100644 index 00000000..535c73e8 --- /dev/null +++ b/fivetran/framework/datasources/quickstart_package.go @@ -0,0 +1,58 @@ +package datasources + +import ( + "context" + "fmt" + + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core" + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/model" + fivetranSchema "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/schema" + "github.com/hashicorp/terraform-plugin-framework/datasource" +) + +func QuickstartPackage() datasource.DataSource { + return &quickstartPackage{} +} + +// Ensure the implementation satisfies the desired interfaces. +var _ datasource.DataSourceWithConfigure = &quickstartPackage{} + +type quickstartPackage struct { + core.ProviderDatasource +} + +func (d *quickstartPackage) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = "fivetran_quickstart_package" +} + +func (d *quickstartPackage) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = fivetranSchema.QuickstartPackageDatasource() +} + +func (d *quickstartPackage) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + if d.GetClient() == nil { + resp.Diagnostics.AddError( + "Unconfigured Fivetran Client", + "Please report this issue to the provider developers.", + ) + + return + } + + var data model.QuickstartPackage + + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + + response, err := d.GetClient().NewQuickstartPackageDetails().PackageDefinitionId(data.Id.ValueString()).Do(ctx) + + if err != nil { + resp.Diagnostics.AddError( + "QuickstartPackage Read Error.", + fmt.Sprintf("%v; code: %v; message: %v", err, response.Code, response.Message), + ) + return + } + + data.ReadFromResponse(ctx, response) + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
+} \ No newline at end of file diff --git a/fivetran/framework/datasources/quickstart_package_test.go b/fivetran/framework/datasources/quickstart_package_test.go new file mode 100644 index 00000000..1e29e9d6 --- /dev/null +++ b/fivetran/framework/datasources/quickstart_package_test.go @@ -0,0 +1,81 @@ +package datasources_test + +import ( + "net/http" + "testing" + + "github.com/fivetran/go-fivetran/tests/mock" + tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" +) + +const ( + quickstartPackageMappingResponse = ` +{ + "id": "package_definition_id", + "name": "package_definition_name", + "version": "version", + "connector_types": [ + "string" + ], + "output_model_names": [ + "string" + ] + } + ` +) + +var ( + quickstartPackageDataSourceMockGetHandler *mock.Handler + + quickstartPackageDataSourceMockData map[string]interface{} +) + +func setupMockClientQuickstartPackageDataSourceConfigMapping(t *testing.T) { + tfmock.MockClient().Reset() + + quickstartPackageDataSourceMockGetHandler = tfmock.MockClient().When(http.MethodGet, "/v1/transformations/package-metadata/package_definition_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + quickstartPackageDataSourceMockData = tfmock.CreateMapFromJsonString(t, quickstartPackageMappingResponse) + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", quickstartPackageDataSourceMockData), nil + }, + ) +} + +func TestDataSourceQuickstartPackageConfigMappingMock(t *testing.T) { + step1 := resource.TestStep{ + Config: ` + data "fivetran_quickstart_package" "test" { + provider = fivetran-provider + id = "package_definition_id" + }`, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, quickstartPackageDataSourceMockGetHandler.Interactions, 1) + tfmock.AssertNotEmpty(t, quickstartPackageDataSourceMockData) + return nil + }, + resource.TestCheckResourceAttr("data.fivetran_quickstart_package.test", "id", "package_definition_id"), + resource.TestCheckResourceAttr("data.fivetran_quickstart_package.test", "name", "package_definition_name"), + resource.TestCheckResourceAttr("data.fivetran_quickstart_package.test", "version", "version"), + ), + } + + resource.Test( + t, + resource.TestCase{ + PreCheck: func() { + setupMockClientQuickstartPackageDataSourceConfigMapping(t) + }, + ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, + CheckDestroy: func(s *terraform.State) error { + return nil + }, + Steps: []resource.TestStep{ + step1, + }, + }, + ) +} diff --git a/fivetran/framework/datasources/quickstart_packages.go b/fivetran/framework/datasources/quickstart_packages.go new file mode 100644 index 00000000..79da2ac3 --- /dev/null +++ b/fivetran/framework/datasources/quickstart_packages.go @@ -0,0 +1,84 @@ +package datasources + +import ( + "context" + "fmt" + + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core" + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/model" + "github.com/hashicorp/terraform-plugin-framework/datasource" + sdk "github.com/fivetran/go-fivetran/transformations" + + fivetranSchema "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/schema" +) + +func QuickstartPackages() datasource.DataSource { + return &quickstartPackages{} +} + +// Ensure the implementation satisfies the desired interfaces. 
+var _ datasource.DataSourceWithConfigure = &quickstartPackages{}
+
+type quickstartPackages struct {
+	core.ProviderDatasource
+}
+
+func (d *quickstartPackages) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+	resp.TypeName = "fivetran_quickstart_packages"
+}
+
+func (d *quickstartPackages) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+	resp.Schema = fivetranSchema.QuickstartPackagesDatasource()
+}
+
+func (d *quickstartPackages) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+	if d.GetClient() == nil {
+		resp.Diagnostics.AddError(
+			"Unconfigured Fivetran Client",
+			"Please report this issue to the provider developers.",
+		)
+
+		return
+	}
+
+	var data model.QuickstartPackages
+	resp.Diagnostics.Append(req.Config.Get(ctx, &data)...)
+
+	var respNextCursor string
+	var listResponse sdk.QuickstartPackagesListResponse
+	limit := 1000
+
+	for {
+		var err error
+		var tmpResp sdk.QuickstartPackagesListResponse
+		svc := d.GetClient().NewQuickstartPackagesList()
+
+		if respNextCursor == "" {
+			tmpResp, err = svc.Limit(limit).Do(ctx)
+		}
+
+		if respNextCursor != "" {
+			tmpResp, err = svc.Limit(limit).Cursor(respNextCursor).Do(ctx)
+		}
+
+		if err != nil {
+			resp.Diagnostics.AddError(
+				"Read error.",
+				fmt.Sprintf("%v; code: %v", err, tmpResp.Code),
+			)
+			return
+		}
+
+		listResponse.Data.Items = append(listResponse.Data.Items, tmpResp.Data.Items...)
+
+		if tmpResp.Data.NextCursor == "" {
+			break
+		}
+
+		respNextCursor = tmpResp.Data.NextCursor
+	}
+
+	data.ReadFromResponse(ctx, listResponse)
+
+	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/fivetran/framework/datasources/quickstart_packages_test.go b/fivetran/framework/datasources/quickstart_packages_test.go
new file mode 100644
index 00000000..1d08b718
--- /dev/null
+++ b/fivetran/framework/datasources/quickstart_packages_test.go
@@ -0,0 +1,98 @@
+package datasources_test
+
+import (
+	"net/http"
+	"testing"
+
+	"github.com/fivetran/go-fivetran/tests/mock"
+	tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock"
+	"github.com/hashicorp/terraform-plugin-testing/helper/resource"
+	"github.com/hashicorp/terraform-plugin-testing/terraform"
+)
+
+var (
+	QuickstartPackagesDataSourceMockGetHandler *mock.Handler
+	QuickstartPackagesDataSourceMockData       map[string]interface{}
+)
+
+const (
+	QuickstartPackagesMappingResponse = `
+	{
+		"items": [
+			{
+				"id": "package_definition_id",
+				"name": "package_definition_name",
+				"version": "version",
+				"connector_types": [
+					"string"
+				],
+				"output_model_names": [
+					"string"
+				]
+			},
+			{
+				"id": "package_definition_id_2",
+				"name": "package_definition_name_2",
+				"version": "version_2",
+				"connector_types": [
+					"string_2"
+				],
+				"output_model_names": [
+					"string_2"
+				]
+			}
+		],
+		"next_cursor": null
+	}`
+)
+
+func setupMockClientQuickstartPackagesDataSourceConfigMapping(t *testing.T) {
+	tfmock.MockClient().Reset()
+
+	QuickstartPackagesDataSourceMockGetHandler = tfmock.MockClient().When(http.MethodGet, "/v1/transformations/package-metadata").ThenCall(
+		func(req *http.Request) (*http.Response, error) {
+			QuickstartPackagesDataSourceMockData = tfmock.CreateMapFromJsonString(t, QuickstartPackagesMappingResponse)
+			return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", QuickstartPackagesDataSourceMockData), nil
+		},
+	)
+}
+
+func
TestDataSourceQuickstartPackagesMappingMock(t *testing.T) { + step1 := resource.TestStep{ + Config: ` + data "fivetran_quickstart_packages" "test_quickstart_package" { + provider = fivetran-provider + }`, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, QuickstartPackagesDataSourceMockGetHandler.Interactions, 1) + tfmock.AssertNotEmpty(t, QuickstartPackagesDataSourceMockData) + return nil + }, + resource.TestCheckResourceAttr("data.fivetran_quickstart_packages.test_quickstart_package", "packages.0.id", "package_definition_id"), + resource.TestCheckResourceAttr("data.fivetran_quickstart_packages.test_quickstart_package", "packages.0.name", "package_definition_name"), + resource.TestCheckResourceAttr("data.fivetran_quickstart_packages.test_quickstart_package", "packages.0.version", "version"), + + resource.TestCheckResourceAttr("data.fivetran_quickstart_packages.test_quickstart_package", "packages.1.id", "package_definition_id_2"), + resource.TestCheckResourceAttr("data.fivetran_quickstart_packages.test_quickstart_package", "packages.1.name", "package_definition_name_2"), + resource.TestCheckResourceAttr("data.fivetran_quickstart_packages.test_quickstart_package", "packages.1.version", "version_2"), + ), + } + + resource.Test( + t, + resource.TestCase{ + PreCheck: func() { + setupMockClientQuickstartPackagesDataSourceConfigMapping(t) + }, + ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, + CheckDestroy: func(s *terraform.State) error { + return nil + }, + Steps: []resource.TestStep{ + step1, + }, + }, + ) +} diff --git a/fivetran/framework/datasources/transformation_project.go b/fivetran/framework/datasources/transformation_project.go new file mode 100644 index 00000000..46ca4f7f --- /dev/null +++ b/fivetran/framework/datasources/transformation_project.go @@ -0,0 +1,58 @@ +package datasources + +import ( + "context" + "fmt" + + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core" + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/model" + fivetranSchema "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/schema" + "github.com/hashicorp/terraform-plugin-framework/datasource" +) + +func TransformationProject() datasource.DataSource { + return &transformationProject{} +} + +// Ensure the implementation satisfies the desired interfaces. +var _ datasource.DataSourceWithConfigure = &transformationProject{} + +type transformationProject struct { + core.ProviderDatasource +} + +func (d *transformationProject) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = "fivetran_transformation_project" +} + +func (d *transformationProject) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = fivetranSchema.TransformationProjectDatasource() +} + +func (d *transformationProject) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + if d.GetClient() == nil { + resp.Diagnostics.AddError( + "Unconfigured Fivetran Client", + "Please report this issue to the provider developers.", + ) + + return + } + + var data model.TransformationDatasourceProject + + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) 
+ + projectResponse, err := d.GetClient().NewTransformationProjectDetails().ProjectId(data.Id.ValueString()).Do(ctx) + + if err != nil { + resp.Diagnostics.AddError( + "TransformationProject Read Error.", + fmt.Sprintf("%v; code: %v; message: %v", err, projectResponse.Code, projectResponse.Message), + ) + return + } + + data.ReadFromResponse(ctx, projectResponse) + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} \ No newline at end of file diff --git a/fivetran/framework/datasources/transformation_project_test.go b/fivetran/framework/datasources/transformation_project_test.go new file mode 100644 index 00000000..c8ae0464 --- /dev/null +++ b/fivetran/framework/datasources/transformation_project_test.go @@ -0,0 +1,108 @@ +package datasources_test + +import ( + "net/http" + "testing" + + "github.com/fivetran/go-fivetran/tests/mock" + tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" +) + +var ( + transformationProjectDataSourceMockGetHandler *mock.Handler + transformationProjectDataSourceMockData map[string]interface{} +) + +func setupMockClientTransformationProjectDataSourceMappingTest(t *testing.T) { + transformationProjectResponse := ` +{ + "id": "projectId", + "type": "DBT_GIT", + "status": "NOT_READY", + "errors": [ + "string" + ], + "created_at": "created_at", + "group_id": "group_id", + "setup_tests": [ + { + "title": "Test Title", + "status": "FAILED", + "message": "Error message", + "details": "Error details" + } + ], + "created_by_id": "created_by_id", + "project_config": { + "dbt_version": "dbt_version", + "default_schema": "default_schema", + "git_remote_url": "git_remote_url", + "folder_path": "folder_path", + "git_branch": "git_branch", + "threads": 0, + "target_name": "target_name", + "environment_vars": [ + "environment_var" + ], + "public_key": "public_key" + } +}` + tfmock.MockClient().Reset() + + transformationProjectDataSourceMockGetHandler = tfmock.MockClient().When(http.MethodGet, "/v1/transformation-projects/projectId").ThenCall( + func(req *http.Request) (*http.Response, error) { + transformationProjectDataSourceMockData = tfmock.CreateMapFromJsonString(t, transformationProjectResponse) + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", transformationProjectDataSourceMockData), nil + }, + ) +} + +func TestDataSourceTransformationProjectMappingMock(t *testing.T) { + // NOTE: the config is totally inconsistent and contains all possible values for mapping test + step1 := resource.TestStep{ + Config: ` + data "fivetran_transformation_project" "project" { + provider = fivetran-provider + id = "projectId" + }`, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationProjectDataSourceMockGetHandler.Interactions, 1) + tfmock.AssertNotEmpty(t, transformationProjectDataSourceMockData) + return nil + }, + resource.TestCheckResourceAttr("data.fivetran_transformation_project.project", "id", "projectId"), + resource.TestCheckResourceAttr("data.fivetran_transformation_project.project", "group_id", "group_id"), + resource.TestCheckResourceAttr("data.fivetran_transformation_project.project", "created_at", "created_at"), + resource.TestCheckResourceAttr("data.fivetran_transformation_project.project", "created_by_id", "created_by_id"), + resource.TestCheckResourceAttr("data.fivetran_transformation_project.project", "type", "DBT_GIT"), + 
resource.TestCheckResourceAttr("data.fivetran_transformation_project.project", "project_config.dbt_version", "dbt_version"), + resource.TestCheckResourceAttr("data.fivetran_transformation_project.project", "project_config.public_key", "public_key"), + resource.TestCheckResourceAttr("data.fivetran_transformation_project.project", "project_config.default_schema", "default_schema"), + resource.TestCheckResourceAttr("data.fivetran_transformation_project.project", "project_config.target_name", "target_name"), + resource.TestCheckResourceAttr("data.fivetran_transformation_project.project", "project_config.environment_vars.0", "environment_var"), + resource.TestCheckResourceAttr("data.fivetran_transformation_project.project", "project_config.git_remote_url", "git_remote_url"), + resource.TestCheckResourceAttr("data.fivetran_transformation_project.project", "project_config.git_branch", "git_branch"), + resource.TestCheckResourceAttr("data.fivetran_transformation_project.project", "project_config.folder_path", "folder_path"), + ), + } + + resource.Test( + t, + resource.TestCase{ + PreCheck: func() { + setupMockClientTransformationProjectDataSourceMappingTest(t) + }, + ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, + CheckDestroy: func(s *terraform.State) error { + return nil + }, + Steps: []resource.TestStep{ + step1, + }, + }, + ) +} diff --git a/fivetran/framework/datasources/transformation_projects.go b/fivetran/framework/datasources/transformation_projects.go new file mode 100644 index 00000000..b977980a --- /dev/null +++ b/fivetran/framework/datasources/transformation_projects.go @@ -0,0 +1,83 @@ +package datasources + +import ( + "context" + "fmt" + + sdk "github.com/fivetran/go-fivetran/transformations" + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core" + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/model" + fivetranSchema "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/schema" + "github.com/hashicorp/terraform-plugin-framework/datasource" +) + +func TransformationProjects() datasource.DataSource { + return &transformationProjects{} +} + +// Ensure the implementation satisfies the desired interfaces. +var _ datasource.DataSourceWithConfigure = &transformationProjects{} + +type transformationProjects struct { + core.ProviderDatasource +} + +func (d *transformationProjects) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = "fivetran_transformation_projects" +} + +func (d *transformationProjects) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = fivetranSchema.TransformationProjectListDatasource() +} + +func (d *transformationProjects) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + if d.GetClient() == nil { + resp.Diagnostics.AddError( + "Unconfigured Fivetran Client", + "Please report this issue to the provider developers.", + ) + + return + } + + var data model.TransformationProjects + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) 
+
+	var respNextCursor string
+	var listResponse sdk.TransformationProjectsListResponse
+	limit := 1000
+
+	for {
+		var err error
+		var tmpResp sdk.TransformationProjectsListResponse
+		svc := d.GetClient().NewTransformationProjectsList()
+
+		if respNextCursor == "" {
+			tmpResp, err = svc.Limit(limit).Do(ctx)
+		}
+
+		if respNextCursor != "" {
+			tmpResp, err = svc.Limit(limit).Cursor(respNextCursor).Do(ctx)
+		}
+
+		if err != nil {
+			resp.Diagnostics.AddError(
+				"Read error.",
+				fmt.Sprintf("%v; code: %v", err, tmpResp.Code),
+			)
+			return
+		}
+
+		listResponse.Data.Items = append(listResponse.Data.Items, tmpResp.Data.Items...)
+
+		if tmpResp.Data.NextCursor == "" {
+			break
+		}
+
+		respNextCursor = tmpResp.Data.NextCursor
+	}
+
+	data.ReadFromResponse(ctx, listResponse)
+
+	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/fivetran/framework/datasources/transformation_projects_test.go b/fivetran/framework/datasources/transformation_projects_test.go
new file mode 100644
index 00000000..5669ad0e
--- /dev/null
+++ b/fivetran/framework/datasources/transformation_projects_test.go
@@ -0,0 +1,93 @@
+package datasources_test
+
+import (
+	"net/http"
+	"testing"
+
+	"github.com/fivetran/go-fivetran/tests/mock"
+	tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock"
+	"github.com/hashicorp/terraform-plugin-testing/helper/resource"
+	"github.com/hashicorp/terraform-plugin-testing/terraform"
+)
+
+var (
+	transformationProjectsDataSourceMockGetHandler *mock.Handler
+	transformationProjectsDataSourceMockData       map[string]interface{}
+)
+
+const (
+	transformationProjectsMappingResponse = `
+	{
+		"items":[
+			{
+				"id": "string",
+				"type": "DBT_GIT",
+				"created_at": "created_at",
+				"created_by_id": "string",
+				"group_id": "string"
+			},
+			{
+				"id": "string2",
+				"type": "DBT_GIT",
+				"created_at": "created_at_2",
+				"created_by_id": "string2",
+				"group_id": "string2"
+			}
+		],
+		"next_cursor": null
+	}
+	`
)
+
+func setupMockClientTransformationProjectsDataSourceConfigMapping(t *testing.T) {
+	tfmock.MockClient().Reset()
+	transformationProjectsDataSourceMockGetHandler = tfmock.MockClient().When(http.MethodGet, "/v1/transformation-projects").ThenCall(
+		func(req *http.Request) (*http.Response, error) {
+			transformationProjectsDataSourceMockData = tfmock.CreateMapFromJsonString(t, transformationProjectsMappingResponse)
+			return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", transformationProjectsDataSourceMockData), nil
+		},
+	)
+}
+
+func TestDataSourceTransformationProjectsMappingMock(t *testing.T) {
+	step1 := resource.TestStep{
+		Config: `
+		data "fivetran_transformation_projects" "test_projects" {
+			provider = fivetran-provider
+		}`,
+
+		Check: resource.ComposeAggregateTestCheckFunc(
+			func(s *terraform.State) error {
+				tfmock.AssertEqual(t, transformationProjectsDataSourceMockGetHandler.Interactions, 1)
+				return nil
+			},
+			resource.TestCheckResourceAttr("data.fivetran_transformation_projects.test_projects", "projects.0.id", "string"),
+			resource.TestCheckResourceAttr("data.fivetran_transformation_projects.test_projects", "projects.0.group_id", "string"),
+			resource.TestCheckResourceAttr("data.fivetran_transformation_projects.test_projects", "projects.0.created_at", "created_at"),
+			resource.TestCheckResourceAttr("data.fivetran_transformation_projects.test_projects", "projects.0.created_by_id", "string"),
+			resource.TestCheckResourceAttr("data.fivetran_transformation_projects.test_projects",
"projects.0.type", "DBT_GIT"), + + resource.TestCheckResourceAttr("data.fivetran_transformation_projects.test_projects", "projects.1.id", "string2"), + resource.TestCheckResourceAttr("data.fivetran_transformation_projects.test_projects", "projects.1.group_id", "string2"), + resource.TestCheckResourceAttr("data.fivetran_transformation_projects.test_projects", "projects.1.created_at", "created_at_2"), + resource.TestCheckResourceAttr("data.fivetran_transformation_projects.test_projects", "projects.1.created_by_id", "string2"), + resource.TestCheckResourceAttr("data.fivetran_transformation_projects.test_projects", "projects.1.type", "DBT_GIT"), + ), + } + + resource.Test( + t, + resource.TestCase{ + PreCheck: func() { + setupMockClientTransformationProjectsDataSourceConfigMapping(t) + }, + ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, + CheckDestroy: func(s *terraform.State) error { + return nil + }, + Steps: []resource.TestStep{ + step1, + }, + }, + ) +} diff --git a/fivetran/framework/provider.go b/fivetran/framework/provider.go index 77177c49..46b4640c 100644 --- a/fivetran/framework/provider.go +++ b/fivetran/framework/provider.go @@ -170,5 +170,9 @@ func (p *fivetranProvider) DataSources(ctx context.Context) []func() datasource. datasources.Connectors, datasources.Destinations, datasources.ExternalLogs, + datasources.QuickstartPackage, + datasources.QuickstartPackages, + datasources.TransformationProject, + datasources.TransformationProjects, } } diff --git a/fivetran/framework/resources/transformation_project.go b/fivetran/framework/resources/transformation_project.go index 9fa5183f..fe6ad477 100644 --- a/fivetran/framework/resources/transformation_project.go +++ b/fivetran/framework/resources/transformation_project.go @@ -47,7 +47,7 @@ func (r *transformationProject) Create(ctx context.Context, req resource.CreateR return } - var data model.TransformationProject + var data model.TransformationResourceProject // Read Terraform plan data into the model resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) @@ -147,7 +147,7 @@ func (r *transformationProject) Read(ctx context.Context, req resource.ReadReque return } - var data model.TransformationProject + var data model.TransformationResourceProject // Read Terraform prior state data into the model resp.Diagnostics.Append(req.State.Get(ctx, &data)...) @@ -181,8 +181,8 @@ func (r *transformationProject) Update(ctx context.Context, req resource.UpdateR return } - var state model.TransformationProject - var plan model.TransformationProject + var state model.TransformationResourceProject + var plan model.TransformationResourceProject // Read Terraform prior state data into the model resp.Diagnostics.Append(req.State.Get(ctx, &state)...) @@ -239,7 +239,7 @@ func (r *transformationProject) Delete(ctx context.Context, req resource.DeleteR return } - var data model.TransformationProject + var data model.TransformationResourceProject // Read Terraform prior state data into the model resp.Diagnostics.Append(req.State.Get(ctx, &data)...) 
diff --git a/fivetran/framework/resources/transformation_project_test.go b/fivetran/framework/resources/transformation_project_test.go
index 467c0b73..d2cf4d8e 100644
--- a/fivetran/framework/resources/transformation_project_test.go
+++ b/fivetran/framework/resources/transformation_project_test.go
@@ -3,7 +3,6 @@ package resources_test
 import (
 	"net/http"
 	"testing"
-	"fmt"
 
 	"github.com/fivetran/go-fivetran/tests/mock"
 	tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock"
diff --git a/templates/data-sources/quickstart_package.md.tmpl b/templates/data-sources/quickstart_package.md.tmpl
new file mode 100644
index 00000000..c77d8fd0
--- /dev/null
+++ b/templates/data-sources/quickstart_package.md.tmpl
@@ -0,0 +1,17 @@
+---
+page_title: "Data Source: fivetran_quickstart_package"
+---
+
+# Data Source: fivetran_quickstart_package
+
+This data source returns the metadata details of the Quickstart transformation package if a valid identifier is provided.
+
+## Example Usage
+
+```hcl
+data "fivetran_quickstart_package" "test" {
+    id = "id"
+}
+```
+
+{{ .SchemaMarkdown | trimspace }}
\ No newline at end of file
diff --git a/templates/data-sources/quickstart_packages.md.tmpl b/templates/data-sources/quickstart_packages.md.tmpl
new file mode 100644
index 00000000..3a7ff2e2
--- /dev/null
+++ b/templates/data-sources/quickstart_packages.md.tmpl
@@ -0,0 +1,16 @@
+---
+page_title: "Data Source: fivetran_quickstart_packages"
+---
+
+# Data Source: fivetran_quickstart_packages
+
+Returns a list of available Quickstart transformation package metadata details.
+
+## Example Usage
+
+```hcl
+data "fivetran_quickstart_packages" "test" {
+}
+```
+
+{{ .SchemaMarkdown | trimspace }}
\ No newline at end of file
diff --git a/templates/data-sources/transformation_projects.md.tmpl b/templates/data-sources/transformation_projects.md.tmpl
new file mode 100644
index 00000000..5f7ac08f
--- /dev/null
+++ b/templates/data-sources/transformation_projects.md.tmpl
@@ -0,0 +1,16 @@
+---
+page_title: "Data Source: fivetran_transformation_projects"
+---
+
+# Data Source: fivetran_transformation_projects
+
+Returns a list of all transformation projects available via API within your Fivetran account.
+
+## Example Usage
+
+```hcl
+data "fivetran_transformation_projects" "test" {
+}
+```
+
+{{ .SchemaMarkdown | trimspace }}
\ No newline at end of file
diff --git a/templates/resources/transformation_project.md.tmpl b/templates/resources/transformation_project.md.tmpl
new file mode 100644
index 00000000..4f6fe49c
--- /dev/null
+++ b/templates/resources/transformation_project.md.tmpl
@@ -0,0 +1,59 @@
+---
+page_title: "Resource: fivetran_transformation_project"
+---
+
+# Resource: fivetran_transformation_project
+
+Resource is in ALPHA state.
+
+This resource allows you to add, manage, and delete transformation projects in your account.
+
+## Example Usage
+
+```hcl
+resource "fivetran_transformation_project" "project" {
+    provider = fivetran-provider
+    group_id = "group_id"
+    type = "DBT_GIT"
+    run_tests = true
+
+    project_config {
+        git_remote_url = "git_remote_url"
+        git_branch = "git_branch"
+        folder_path = "folder_path"
+        dbt_version = "dbt_version"
+        default_schema = "default_schema"
+        threads = 0
+        target_name = "target_name"
+        environment_vars = ["environment_var"]
+    }
+}
+```
+
+{{ .SchemaMarkdown | trimspace }}
+
+## Import
+
+1. To import an existing `fivetran_transformation_project` resource into your Terraform state, you need to get the **Transformation Project ID**. Call `GET https://api.fivetran.com/v1/transformation-projects` to retrieve the available projects.
+2. Fetch the project details for the particular `project-id` using `GET https://api.fivetran.com/v1/transformation-projects/{project-id}` to ensure that this is the project you want to import.
+3. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_transformation_project" "my_imported_fivetran_transformation_project" {
+
+}
+```
+
+4. Run the `terraform import` command:
+
+```
+terraform import fivetran_transformation_project.my_imported_fivetran_transformation_project {Transformation Project ID}
+```
+
+5. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_transformation_project.my_imported_fivetran_transformation_project'
+```
+
+6. Copy the values and paste them to your `.tf` configuration.
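+
+Alternatively, if you use Terraform 1.5 or later, you can define a config-driven `import` block instead of running the CLI command. A minimal sketch (the ID value is a placeholder for your actual Transformation Project ID):
+
+```hcl
+import {
+  to = fivetran_transformation_project.my_imported_fivetran_transformation_project
+  id = "transformation_project_id"
+}
+```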
\ No newline at end of file

From c0481aa809a45539fd80dd8f2539a4241fbfcd7d Mon Sep 17 00:00:00 2001
From: Aleksandr Boldyrev
Date: Wed, 22 Jan 2025 18:16:31 +0100
Subject: [PATCH 04/13] docs

---
 docs/data-sources/quickstart_package.md       |   10 +-
 docs/data-sources/quickstart_packages.md      |   10 +-
 docs/data-sources/transformation_project.md   |   47 +
 docs/data-sources/transformation_projects.md  |   17 +
 docs/data-sources/user.md                     |   35 +
 .../user_connector_memberships.md             |   35 +
 docs/data-sources/user_group_memberships.md   |   35 +
 docs/data-sources/users.md                    |   41 +
 docs/data-sources/webhook.md                  |   34 +
 docs/data-sources/webhooks.md                 |   40 +
 docs/guides/connector_setup.md                |  142 +
 docs/guides/dbt_private_git_deploy_key.md     |   62 +
 docs/guides/dbt_transformation.md             |   92 +
 docs/guides/schema_json.md                    |   53 +
 docs/guides/schema_setup.md                   |  100 +
 docs/guides/version_0.7.2_update_guides.md    |  108 +
 docs/guides/version_1.1.18_update_guides.md   |   98 +
 docs/guides/version_1.3.0_update_guides.md    |  116 +
 docs/guides/version_1.4.0_update_guides.md    |  104 +
 docs/index.md                                 |   51 +
 docs/resources/connector.md                   | 3961 +++++++++++++++++
 docs/resources/connector_certificates.md      |   46 +
 docs/resources/connector_fingerprints.md      |   79 +
 docs/resources/connector_schedule.md          |   73 +
 docs/resources/connector_schema_config.md     |  280 ++
 docs/resources/dbt_git_project_config.md      |   64 +
 docs/resources/dbt_project.md                 |  104 +
 docs/resources/dbt_transformation.md          |   93 +
 docs/resources/destination.md                 |  486 ++
 docs/resources/destination_certificates.md    |   46 +
 docs/resources/destination_fingerprints.md    |   79 +
 docs/resources/external_logging.md            |   95 +
 docs/resources/group.md                       |   58 +
 docs/resources/group_users.md                 |   78 +
 docs/resources/hybrid_deployment_agent.md     |   41 +
 docs/resources/local_processing_agent.md      |   54 +
 docs/resources/private_link.md                |   55 +
 docs/resources/proxy_agent.md                 |   35 +
 docs/resources/team.md                        |   61 +
 docs/resources/team_connector_membership.md   |   82 +
 docs/resources/team_group_membership.md       |   82 +
 docs/resources/team_user_membership.md        |   76 +
 docs/resources/transformation_project.md      |   96 +
 docs/resources/user.md                        |   67 +
 docs/resources/user_connector_membership.md   |   78 +
 docs/resources/user_group_membership.md       |   78 +
 docs/resources/webhook.md                     |   69 +
 .../transformation_project.md.tmpl            |   17 +
 48 files changed, 7653 insertions(+), 10 deletions(-)
 create mode 100644 docs/data-sources/transformation_project.md
 create mode 100644 docs/data-sources/user.md
 create mode 100644 docs/data-sources/user_connector_memberships.md
 create mode 100644
docs/data-sources/user_group_memberships.md create mode 100644 docs/data-sources/users.md create mode 100644 docs/data-sources/webhook.md create mode 100644 docs/data-sources/webhooks.md create mode 100644 docs/guides/connector_setup.md create mode 100644 docs/guides/dbt_private_git_deploy_key.md create mode 100644 docs/guides/dbt_transformation.md create mode 100644 docs/guides/schema_json.md create mode 100644 docs/guides/schema_setup.md create mode 100644 docs/guides/version_0.7.2_update_guides.md create mode 100644 docs/guides/version_1.1.18_update_guides.md create mode 100644 docs/guides/version_1.3.0_update_guides.md create mode 100644 docs/guides/version_1.4.0_update_guides.md create mode 100644 docs/index.md create mode 100644 docs/resources/connector.md create mode 100644 docs/resources/connector_certificates.md create mode 100644 docs/resources/connector_fingerprints.md create mode 100644 docs/resources/connector_schedule.md create mode 100644 docs/resources/connector_schema_config.md create mode 100644 docs/resources/dbt_git_project_config.md create mode 100644 docs/resources/dbt_project.md create mode 100644 docs/resources/dbt_transformation.md create mode 100644 docs/resources/destination.md create mode 100644 docs/resources/destination_certificates.md create mode 100644 docs/resources/destination_fingerprints.md create mode 100644 docs/resources/external_logging.md create mode 100644 docs/resources/group.md create mode 100644 docs/resources/group_users.md create mode 100644 docs/resources/hybrid_deployment_agent.md create mode 100644 docs/resources/local_processing_agent.md create mode 100644 docs/resources/private_link.md create mode 100644 docs/resources/proxy_agent.md create mode 100644 docs/resources/team.md create mode 100644 docs/resources/team_connector_membership.md create mode 100644 docs/resources/team_group_membership.md create mode 100644 docs/resources/team_user_membership.md create mode 100644 docs/resources/transformation_project.md create mode 100644 docs/resources/user.md create mode 100644 docs/resources/user_connector_membership.md create mode 100644 docs/resources/user_group_membership.md create mode 100644 docs/resources/webhook.md create mode 100644 templates/data-sources/transformation_project.md.tmpl diff --git a/docs/data-sources/quickstart_package.md b/docs/data-sources/quickstart_package.md index 51320e79..000a80f5 100644 --- a/docs/data-sources/quickstart_package.md +++ b/docs/data-sources/quickstart_package.md @@ -17,17 +17,13 @@ data "fivetran_quickstart_package" "test" { ## Schema -### Read-Only - -- `packages` (Block List) (see [below for nested schema](#nestedblock--packages)) +### Required - -### Nested Schema for `packages` +- `id` (String) The unique identifier for the Quickstart transformation package definition within the Fivetran system -Read-Only: +### Read-Only - `connector_types` (Set of String) The set of connector types -- `id` (String) The unique identifier for the Quickstart transformation package definition within the Fivetran system - `name` (String) The Quickstart transformation package name - `output_model_names` (Set of String) The list of transformation output models - `version` (String) The Quickstart package definition version \ No newline at end of file diff --git a/docs/data-sources/quickstart_packages.md b/docs/data-sources/quickstart_packages.md index 8cde78c9..2bdd1d78 100644 --- a/docs/data-sources/quickstart_packages.md +++ b/docs/data-sources/quickstart_packages.md @@ -17,13 +17,17 @@ data 
"fivetran_quickstart_packages" "test" { ## Schema -### Required +### Read-Only -- `id` (String) The unique identifier for the Quickstart transformation package definition within the Fivetran system +- `packages` (Block List) (see [below for nested schema](#nestedblock--packages)) -### Read-Only + +### Nested Schema for `packages` + +Read-Only: - `connector_types` (Set of String) The set of connector types +- `id` (String) The unique identifier for the Quickstart transformation package definition within the Fivetran system - `name` (String) The Quickstart transformation package name - `output_model_names` (Set of String) The list of transformation output models - `version` (String) The Quickstart package definition version \ No newline at end of file diff --git a/docs/data-sources/transformation_project.md b/docs/data-sources/transformation_project.md new file mode 100644 index 00000000..2a7ae450 --- /dev/null +++ b/docs/data-sources/transformation_project.md @@ -0,0 +1,47 @@ +--- +page_title: "Data Source: fivetran_transformation_project" +--- + +# Data Source: fivetran_transformation_project + +Returns transformation project details if a valid identifier was provided + +## Example Usage + +```hcl +data "fivetran_transformation_project" "test" { + id = "id" +} +``` + + +## Schema + +### Required + +- `id` (String) The unique identifier for the transformation Project within the Fivetran system. + +### Read-Only + +- `created_at` (String) The timestamp of the transformation Project creation. +- `created_by_id` (String) The unique identifier for the User within the Fivetran system who created the dbt Project. +- `errors` (Set of String) List of environment variables defined as key-value pairs in the raw string format using = as a separator. The variable name should have the DBT_ prefix and can contain A-Z, 0-9, dash, underscore, or dot characters. Example: "DBT_VARIABLE=variable_value" +- `group_id` (String) The unique identifier for the group within the Fivetran system. +- `project_config` (Block, Read-only) (see [below for nested schema](#nestedblock--project_config)) +- `status` (String) Status of transformation Project (NOT_READY, READY, ERROR). +- `type` (String) Transformation project type. + + +### Nested Schema for `project_config` + +Read-Only: + +- `dbt_version` (String) The version of transformation that should run the project +- `default_schema` (String) Default schema in destination. This production schema will contain your transformed data. +- `environment_vars` (Set of String) List of environment variables defined as key-value pairs in the raw string format using = as a separator. The variable name should have the DBT_ prefix and can contain A-Z, 0-9, dash, underscore, or dot characters. Example: "DBT_VARIABLE=variable_value" +- `folder_path` (String) Folder in Git repo with your transformation project +- `git_branch` (String) Git branch +- `git_remote_url` (String) Git remote URL with your transformation project +- `public_key` (String) Public key to grant Fivetran SSH access to git repository. +- `target_name` (String) Target name to set or override the value from the deployment.yaml +- `threads` (Number) The number of threads transformation will use (from 1 to 32). Make sure this value is compatible with your destination type. For example, Snowflake supports only 8 concurrent queries on an X-Small warehouse. 
\ No newline at end of file
diff --git a/docs/data-sources/transformation_projects.md b/docs/data-sources/transformation_projects.md
index 8e12764d..51e2ada5 100644
--- a/docs/data-sources/transformation_projects.md
+++ b/docs/data-sources/transformation_projects.md
@@ -13,3 +13,20 @@ data "fivetran_transformation_projects" "test" {
 }
 ```
 
+
+## Schema
+
+### Read-Only
+
+- `projects` (Attributes List) (see [below for nested schema](#nestedatt--projects))
+
+
+### Nested Schema for `projects`
+
+Read-Only:
+
+- `created_at` (String) The timestamp of when the project was created in your account.
+- `created_by_id` (String) The unique identifier for the User within the Fivetran system who created the transformation Project.
+- `group_id` (String) The name of the group within your account related to the project.
+- `id` (String) The unique identifier for the transformation project within the Fivetran system.
+- `type` (String) Transformation project type.
\ No newline at end of file
diff --git a/docs/data-sources/user.md b/docs/data-sources/user.md
new file mode 100644
index 00000000..48e604a7
--- /dev/null
+++ b/docs/data-sources/user.md
@@ -0,0 +1,35 @@
+---
+page_title: "Data Source: fivetran_user"
+---
+
+# Data Source: fivetran_user
+
+This data source returns a user object.
+
+## Example Usage
+
+```hcl
+data "fivetran_user" "my_user" {
+    id = "anonymous_mystery"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `id` (String) The unique identifier for the user within the Fivetran system.
+
+### Read-Only
+
+- `created_at` (String) The timestamp that the user created their Fivetran account.
+- `email` (String) The email address that the user has associated with their user profile.
+- `family_name` (String) The last name of the user.
+- `given_name` (String) The first name of the user.
+- `invited` (Boolean) The field indicates whether the user has been invited to your account.
+- `logged_in_at` (String) The last time that the user has logged into their Fivetran account.
+- `phone` (String) The phone number of the user.
+- `picture` (String) The user's avatar as a URL link (for example, 'http://mycompany.com/avatars/john_white.png') or base64 data URI (for example, 'data:image/png;base64,aHR0cDovL215Y29tcGFueS5jb20vYXZhdGFycy9qb2huX3doaXRlLnBuZw==')
+- `role` (String) The role that you would like to assign to the user.
+- `verified` (Boolean) The field indicates whether the user has verified their email address in the account creation process.
\ No newline at end of file
diff --git a/docs/data-sources/user_connector_memberships.md b/docs/data-sources/user_connector_memberships.md
new file mode 100644
index 00000000..1c3f74c8
--- /dev/null
+++ b/docs/data-sources/user_connector_memberships.md
@@ -0,0 +1,35 @@
+---
+page_title: "Data Source: fivetran_user_connector_memberships"
+---
+
+# Data Source: fivetran_user_connector_memberships
+
+This data source returns a connector membership for a user.
+
+## Example Usage
+
+```hcl
+data "fivetran_user_connector_memberships" "user_connector_membership" {
+    user_id = "user_id"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `user_id` (String) The unique identifier for the user within your account.
+
+### Read-Only
+
+- `connector` (Block Set) (see [below for nested schema](#nestedblock--connector))
+
+
+### Nested Schema for `connector`
+
+Read-Only:
+
+- `connector_id` (String) The connector unique identifier
+- `created_at` (String) The date and time the membership was created
+- `role` (String) The user's role that links the user and the connector
\ No newline at end of file
diff --git a/docs/data-sources/user_group_memberships.md b/docs/data-sources/user_group_memberships.md
new file mode 100644
index 00000000..c1532c5c
--- /dev/null
+++ b/docs/data-sources/user_group_memberships.md
@@ -0,0 +1,35 @@
+---
+page_title: "Data Source: fivetran_user_group_memberships"
+---
+
+# Data Source: fivetran_user_group_memberships
+
+This data source returns a list of group memberships for a user.
+
+## Example Usage
+
+```hcl
+data "fivetran_user_group_memberships" "team_group_memberships" {
+    user_id = "user_id"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `user_id` (String) The unique identifier for the user within your account.
+
+### Read-Only
+
+- `group` (Block Set) (see [below for nested schema](#nestedblock--group))
+
+
+### Nested Schema for `group`
+
+Read-Only:
+
+- `created_at` (String) The date and time the membership was created
+- `group_id` (String) The group unique identifier
+- `role` (String) The user's role that links the user and the group
\ No newline at end of file
diff --git a/docs/data-sources/users.md b/docs/data-sources/users.md
new file mode 100644
index 00000000..0ef8b4ea
--- /dev/null
+++ b/docs/data-sources/users.md
@@ -0,0 +1,41 @@
+---
+page_title: "Data Source: fivetran_users"
+---
+
+# Data Source: fivetran_users
+
+This data source returns a list of all users within your Fivetran account.
+
+## Example Usage
+
+```hcl
+data "fivetran_users" "users" {
+}
+```
+
+
+## Schema
+
+### Optional
+
+- `id` (String) The ID of this resource.
+
+### Read-Only
+
+- `users` (Block Set) (see [below for nested schema](#nestedblock--users))
+
+
+### Nested Schema for `users`
+
+Read-Only:
+
+- `created_at` (String) The timestamp that the user created their Fivetran account
+- `email` (String) The email address that the user has associated with their user profile.
+- `family_name` (String) The last name of the user.
+- `given_name` (String) The first name of the user.
+- `id` (String) The unique identifier for the user within your account.
+- `invited` (Boolean) The field indicates whether the user has been invited to your account.
+- `logged_in_at` (String) The last time that the user has logged into their Fivetran account.
+- `phone` (String) The phone number of the user.
+- `picture` (String) The user's avatar as a URL link (for example, 'http://mycompany.com/avatars/john_white.png') or base64 data URI (for example, 'data:image/png;base64,aHR0cDovL215Y29tcGFueS5jb20vYXZhdGFycy9qb2huX3doaXRlLnBuZw==')
+- `verified` (Boolean) The field indicates whether the user has verified their email address in the account creation process.
\ No newline at end of file
diff --git a/docs/data-sources/webhook.md b/docs/data-sources/webhook.md
new file mode 100644
index 00000000..f3d3394c
--- /dev/null
+++ b/docs/data-sources/webhook.md
@@ -0,0 +1,34 @@
+---
+page_title: "Data Source: fivetran_webhook"
+---
+
+# Data Source: fivetran_webhook
+
+This data source returns a webhook object.
+
+## Example Usage
+
+```hcl
+data "fivetran_webhook" "webhook" {
+    id = "webhook_id"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `id` (String) The webhook ID
+
+### Read-Only
+
+- `active` (Boolean) Boolean, if set to true, webhooks are immediately sent in response to events
+- `created_at` (String) The webhook creation timestamp
+- `created_by` (String) The ID of the user who created the webhook.
+- `events` (Set of String) The array of event types
+- `group_id` (String) The group ID
+- `run_tests` (Boolean) Specifies whether the setup tests should be run
+- `secret` (String) The secret string used for payload signing and masked in the response.
+- `type` (String) The webhook type (group, account)
+- `url` (String) Your webhooks URL endpoint for your application
\ No newline at end of file
diff --git a/docs/data-sources/webhooks.md b/docs/data-sources/webhooks.md
new file mode 100644
index 00000000..3cede41f
--- /dev/null
+++ b/docs/data-sources/webhooks.md
@@ -0,0 +1,40 @@
+---
+page_title: "Data Source: fivetran_webhooks"
+---
+
+# Data Source: fivetran_webhooks
+
+This data source returns a list of all webhooks within your Fivetran account.
+
+## Example Usage
+
+```hcl
+data "fivetran_webhooks" "webhooks" {
+}
+```
+
+
+## Schema
+
+### Read-Only
+
+- `webhooks` (Attributes Set) (see [below for nested schema](#nestedatt--webhooks))
+
+
+### Nested Schema for `webhooks`
+
+Required:
+
+- `id` (String) The webhook ID
+
+Read-Only:
+
+- `active` (Boolean) Boolean, if set to true, webhooks are immediately sent in response to events
+- `created_at` (String) The webhook creation timestamp
+- `created_by` (String) The ID of the user who created the webhook.
+- `events` (Set of String) The array of event types
+- `group_id` (String) The group ID
+- `run_tests` (Boolean) Specifies whether the setup tests should be run
+- `secret` (String) The secret string used for payload signing and masked in the response.
+- `type` (String) The webhook type (group, account)
+- `url` (String) Your webhooks URL endpoint for your application
\ No newline at end of file
diff --git a/docs/guides/connector_setup.md b/docs/guides/connector_setup.md
new file mode 100644
index 00000000..c7c074d3
--- /dev/null
+++ b/docs/guides/connector_setup.md
@@ -0,0 +1,142 @@
+---
+page_title: "Initial Setup"
+subcategory: "Getting Started"
+---
+
+# How to set up your Fivetran environment using Terraform
+
+In this guide, we will set up a simple pipeline with one source using the Fivetran Terraform Provider.
+
+## Provider setup
+
+First of all, you need to get your [Fivetran API Key and Secret](https://fivetran.com/docs/rest-api/getting-started#gettingstarted) and save them into environment variables:
+
+```bash
+export FIVETRAN_APIKEY=
+export FIVETRAN_APISECRET=
+```
+
+```hcl
+# Terraform 0.13+ uses the Terraform Registry:
+
+terraform {
+    required_providers {
+        fivetran = {
+            version = ">= 1.0.0"
+            source = "fivetran/fivetran"
+        }
+    }
+}
+
+# Configure the Fivetran provider
+provider "fivetran" {
+# We recommend using the environment variables `FIVETRAN_APIKEY` and `FIVETRAN_APISECRET` instead of explicit assignment
+# api_key = var.fivetran_api_key
+# api_secret = var.fivetran_api_secret
+}
+
+# Terraform 0.12- can be specified as:
+
+# Configure the Fivetran provider
+# provider "fivetran" {
+#   api_key = "${var.fivetran_api_key}"
+#   api_secret = "${var.fivetran_api_secret}"
+# }
+```
+
+## Add your group and destination
+
+The root resource for your Fivetran infrastructure setup is always the `Destination group`.
+
+First of all, you need to set up the group:
+
+```hcl
+resource "fivetran_group" "group" {
+    name = "MyGroup"
+}
+```
+
+Once you have created the group, you need to associate a `Destination` with it:
+
+```hcl
+resource "fivetran_destination" "destination" {
+    group_id = fivetran_group.group.id
+    service = "postgres_rds_warehouse"
+    time_zone_offset = "0"
+    region = "GCP_US_EAST4"
+    trust_certificates = "true"
+    trust_fingerprints = "true"
+    daylight_saving_time_enabled = "true"
+    run_setup_tests = "true"
+
+    config {
+        host = "destination.host"
+        port = 5432
+        user = "postgres"
+        password = "myPassword"
+        database = "myDatabaseName"
+        connection_type = "Directly"
+    }
+
+    # the setup tests operation could take time
+    # you can define a custom timeout for create and update
+    # the default value for the destination resource is 30 minutes
+    timeouts {
+        create = "60m"
+        update = "60m"
+    }
+}
+```
+
+## Add your first connector
+
+We are now ready to set up our first connector:
+
+```hcl
+resource "fivetran_connector" "connector" {
+    group_id = fivetran_group.group.id
+    service = "fivetran_log"
+    run_setup_tests = true
+
+    destination_schema {
+        name = "my_fivetran_log_connector"
+    }
+
+    config {
+        is_account_level_connector = "false"
+    }
+
+    # the setup tests operation could take time
+    # you can define a custom timeout for create and update
+    # the default value for the connector resource is 30 minutes
+    timeouts {
+        create = "60m"
+        update = "60m"
+    }
+
+    depends_on = [
+        fivetran_destination.destination
+    ]
+}
+```
+
+## Configure connector schedule
+
+Next, we should configure how the connector will be scheduled to sync:
+
+```hcl
+resource "fivetran_connector_schedule" "connector_schedule" {
+    connector_id = fivetran_connector.connector.id
+    sync_frequency = 60
+    paused = false
+    pause_after_trial = false
+}
+```
+
+Now we are ready to apply our configuration:
+
+```bash
+terraform apply
+```
+
+## Example configuration
+
+An example .tf file with the configuration can be found [here](https://github.com/fivetran/terraform-provider-fivetran/tree/main/config-examples/connector_setup.tf).
\ No newline at end of file
diff --git a/docs/guides/dbt_private_git_deploy_key.md b/docs/guides/dbt_private_git_deploy_key.md
new file mode 100644
index 00000000..f85e87e9
--- /dev/null
+++ b/docs/guides/dbt_private_git_deploy_key.md
@@ -0,0 +1,62 @@
+---
+page_title: "Dbt Project Setup With Git Private Repo"
+subcategory: "Getting Started"
+---
+
+# How to set up a dbt Project with a private Git Repo.
+
+To be able to use a private dbt Project Git repository, you have to grant Fivetran access to this repo.
+To do that, you need to add a Deploy Key to your repository.
+To get the SSH key from Fivetran, create a `fivetran_dbt_project` resource:
+
+```hcl
+resource "fivetran_group" "my_group" {
+    name = "My_Group"
+}
+
+resource "fivetran_dbt_project" "project" {
+    group_id = fivetran_group.my_group.id
+    dbt_version = "1.3.2"
+    threads = 1
+    default_schema = "your_project_default_schema"
+    type = "GIT"
+}
+```
+
+Then you need to set up the dbt Project public key (the `public_key` field of the created resource) as a deploy key in your repo using:
+
+[GitHub Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/repository_deploy_key):
+```hcl
+resource "github_repository_deploy_key" "example_repository_deploy_key" {
+    title = "Repository test key"
+    repository = "repo-owner/repo-name"
+    key = fivetran_dbt_project.test_project.public_key
+    read_only = true
+}
+```
+
+or
+
+[Bitbucket Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/DrFaust92/bitbucket/latest/docs/resources/deploy_key)
+```hcl
+resource "bitbucket_deploy_key" "test" {
+    workspace = "repo-owner"
+    repository = "repo-name"
+    key = fivetran_dbt_project.test_project.public_key
+    label = "Repository test key"
+}
+```
+
+Because this approach relies on third-party providers, please make sure that the providers are configured correctly for connection and that access to the repositories is granted correctly.
+
+After that, you can configure your project in the `fivetran_dbt_git_project_config` resource:
+
+```hcl
+resource "fivetran_dbt_git_project_config" "project_config" {
+    id = fivetran_dbt_project.project.id
+
+    git_remote_url = "git@github.com:repo-owner/repo-name.git"
+    git_branch = "main"
+}
+```
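+
+If the repository itself is managed outside Terraform, you can expose the generated key instead and add it to the repository manually. A minimal sketch (the output name is just an example):
+
+```hcl
+output "fivetran_deploy_key" {
+    description = "Public deploy key generated by Fivetran for the dbt project"
+    value       = fivetran_dbt_project.project.public_key
+}
+```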
+To get the SSH key from Fivetran, create a `fivetran_dbt_project` resource:
+
+```hcl
+resource "fivetran_group" "my_group" {
+    name = "My_Group"
+}
+
+resource "fivetran_dbt_project" "project" {
+    group_id = fivetran_group.my_group.id
+    dbt_version = "1.3.2"
+    threads = 1
+    default_schema = "your_project_default_schema"
+    type = "GIT"
+}
+```
+
+Then you need to set up the dbt Project public key (the `public_key` field of the created resource) as a deploy key in your repo, using one of the following:
+
+[GitHub Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/repository_deploy_key):
+```hcl
+resource "github_repository_deploy_key" "example_repository_deploy_key" {
+    title = "Repository test key"
+    repository = "repo-owner/repo-name"
+    key = fivetran_dbt_project.project.public_key
+    read_only = true
+}
+```
+
+or
+
+[Bitbucket Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/DrFaust92/bitbucket/latest/docs/resources/deploy_key):
+```hcl
+resource "bitbucket_deploy_key" "test" {
+    workspace = "repo-owner"
+    repository = "repo-name"
+    key = fivetran_dbt_project.project.public_key
+    label = "Repository test key"
+}
+```
+
+Since this approach relies on third-party providers, please make sure the providers are configured correctly for connection and have access to the repositories.
+
+After that, you can configure your project in the `fivetran_dbt_git_project_config` resource:
+
+```hcl
+resource "fivetran_dbt_git_project_config" "project_config" {
+    id = fivetran_dbt_project.project.id
+
+    git_remote_url = "git@github.com:repo-owner/repo-name.git"
+    git_branch = "main"
+}
+```
+
diff --git a/docs/guides/dbt_transformation.md b/docs/guides/dbt_transformation.md
new file mode 100644
index 00000000..b95b1d99
--- /dev/null
+++ b/docs/guides/dbt_transformation.md
@@ -0,0 +1,92 @@
+---
+page_title: "Dbt Project and Transformation Setup"
+subcategory: "Getting Started"
+---
+
+# How to set up a dbt Project and Transformation schedule
+
+In this guide, we will set up a simple pipeline with one dbt Transformation using the Fivetran Terraform Provider.
+
+## Prerequisites
+
+To create a project, you need to have a group and a destination.
+
+You can use existing ones, or configure new ones using Terraform:
+
+```hcl
+resource "fivetran_group" "group" {
+    name = "MyGroup"
+}
+```
+
+Once you have created the group, you need to associate a `Destination` with it:
+
+```hcl
+resource "fivetran_destination" "destination" {
+    group_id = fivetran_group.group.id
+    service = "postgres_rds_warehouse"
+    time_zone_offset = "0"
+    region = "GCP_US_EAST4"
+    trust_certificates = "true"
+    trust_fingerprints = "true"
+    run_setup_tests = "true"
+
+    config {
+        host = "destination.host"
+        port = 5432
+        user = "postgres"
+        password = "myPassword"
+        database = "myDatabaseName"
+        connection_type = "Directly"
+    }
+}
+```
+
+-> Note: your destination needs to have the `connected` status before the dbt Project setup.
+
+## Add the `fivetran_dbt_project` resource
+
+Follow our [dbt Project setup guide](https://fivetran.com/docs/transformations/dbt/setup-guide#prerequisites) to complete the prerequisites for project creation.
+After that, let's configure the dbt Project resource:
+
+```hcl
+resource "fivetran_dbt_project" "project" {
+    group_id = fivetran_destination.destination.id
+    dbt_version = "1.3.2"
+    threads = 1
+    default_schema = "your_project_default_schema"
+    type = "GIT"
+    project_config {
+        git_remote_url = "git@github.com:your_project_git_remote.git"
+        git_branch = "main"
+    }
+}
+```
+
+Project creation and initialization take time, so it's OK if resource creation takes 7-10 minutes.
+
+## Configure your dbt Transformation schedule
+
+You can configure your first Fivetran dbt Transformation with the `fivetran_dbt_transformation` resource:
+
+```hcl
+resource "fivetran_dbt_transformation" "test_transformation" {
+    dbt_project_id = fivetran_dbt_project.project.id
+    dbt_model_name = "your_dbt_model_name"
+    paused = false
+    run_tests = false
+    schedule {
+        schedule_type = "INTERVAL"
+        days_of_week = ["MONDAY"]
+        interval = 60
+    }
+}
+```
+
+The above configuration schedules the model named `your_dbt_model_name` to run every 60 minutes on Mondays.
+
+Now we are ready to apply our configuration:
+
+```bash
+terraform apply
+```
\ No newline at end of file
diff --git a/docs/guides/schema_json.md b/docs/guides/schema_json.md
new file mode 100644
index 00000000..79b0d866
--- /dev/null
+++ b/docs/guides/schema_json.md
@@ -0,0 +1,53 @@
+---
+page_title: "Using schemas_json field"
+subcategory: "Getting Started"
+---
+
+# How to set up Fivetran connector schema config using Terraform in `.json` format
+
+When the schema configuration is really big and you have to define settings for more than 1000 tables, it's better to set the schema settings directly using a `.json` file:
+
+File `schema-config.json`:
+```json
+{
+    "schema_0": {
+        "enabled": true,
+        "some_random_extra_field": "extra_value",
+        "tables": {
+            "table_0": {
+                "some_random_extra_field": "extra_value",
+                "enabled": true
+            },
+            ...
+        }
+    },
+    "schema_2": {
+        "enabled": true,
+        "some_random_extra_field": "extra_value",
+        "tables": {
+            "table_0": {
+                "some_random_extra_field": "extra_value",
+                "enabled": true,
+                "columns": {
+                    "column_1": {
+                        "enabled": false
+                    }
+                }
+            },
+            ...
+        }
+    },
+    ...
+}
+```
+
+Configuration `.tf` file:
+```hcl
+resource "fivetran_connector_schema_config" "test_schema" {
+    provider = fivetran-provider
+    connector_id = "connector_id"
+    schema_change_handling = "ALLOW_COLUMNS"
+    schemas_json = file("path/to/schema-config.json")
+}
+```
+-> NOTE: Please make sure that the `enabled` field inside the JSON is set to a boolean data type.
\ No newline at end of file
diff --git a/docs/guides/schema_setup.md b/docs/guides/schema_setup.md
new file mode 100644
index 00000000..abfffdb1
--- /dev/null
+++ b/docs/guides/schema_setup.md
@@ -0,0 +1,100 @@
+---
+page_title: "Connector Schema Setup"
+subcategory: "Getting Started"
+---
+
+# How to set up Fivetran connector schema config using Terraform
+
+In this guide, we will set up a simple pipeline with one connector and schema using the Fivetran Terraform Provider.
+
+## Create a connector resource
+
+Create the `fivetran_connector` resource:
+
+```hcl
+resource "fivetran_connector" "connector" {
+    ...
+    run_setup_tests = "true" # necessary to authorize the connector
+}
+```
+
+The connector will be in the paused state, but ready to sync.
+
+-> The connector should be **authorized** to be able to fetch the schema from the source. Set `run_setup_tests = "true"`.
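+
+For reference, a complete, authorized connector definition might look like the following sketch. It assumes the `fivetran_log` service and an existing `fivetran_group.group` resource, mirroring the initial setup guide; adjust the service and config to your source:
+
+```hcl
+resource "fivetran_connector" "connector" {
+    group_id = fivetran_group.group.id  # assumed existing group
+    service = "fivetran_log"            # assumed source type
+    run_setup_tests = "true"            # necessary to authorize the connector
+
+    destination_schema {
+        name = "my_fivetran_log_connector"
+    }
+}
+```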
+
+## Set up connector schema config
+
+Let's define what exactly we want to sync by using the `fivetran_connector_schema_config` resource:
+
+```hcl
+resource "fivetran_connector_schema_config" "connector_schema" {
+    connector_id = fivetran_connector.connector.id
+    schema_change_handling = "BLOCK_ALL"
+    schemas = {
+        "my_fivetran_log_connector" = {
+            enabled = true
+            tables = {
+                "log" = {
+                    enabled = true
+                    columns = {
+                        "event" = {
+                            enabled = true
+                        }
+                        "message_data" = {
+                            enabled = true
+                        }
+                        "message_event" = {
+                            enabled = true
+                        }
+                        "sync_id" = {
+                            enabled = true
+                        }
+                    }
+                }
+            }
+        }
+    }
+    # Before applying, the schema resource will trigger the "Reload connector schema config" endpoint.
+    # This could take time for slow sources or for sources with a huge schema;
+    # to prevent timeouts, you can set custom timeouts.
+    timeouts {
+        create = "6h"
+        read = "6h"
+        update = "6h"
+    }
+    # If you are not sure about the timing, you can set the timeouts to 0 - it means `no timeout`.
+    # WARNING: not recommended - this could lead to the apply process hanging unpredictably.
+    #timeouts {
+    #    create = "0"
+    #    read = "0"
+    #    update = "0"
+    #}
+}
+```
+
+## Set up connector schedule configuration
+
+-> The schedule should depend on the schema resource to enable the connector **after** the schema changes are applied.
+
+```hcl
+resource "fivetran_connector_schedule" "my_connector_schedule" {
+    connector_id = fivetran_connector_schema_config.connector_schema.id
+
+    sync_frequency = "5"
+
+    paused = false
+    pause_after_trial = true
+
+    schedule_type = "auto"
+}
+```
+
+## Apply configuration
+
+```bash
+terraform apply
+```
+
+## Example configuration
+
+An example .tf file with the configuration can be found [here](https://github.com/fivetran/terraform-provider-fivetran/tree/main/config-examples/connector_schema_setup.tf).
\ No newline at end of file
diff --git a/docs/guides/version_0.7.2_update_guides.md b/docs/guides/version_0.7.2_update_guides.md
new file mode 100644
index 00000000..0ca86752
--- /dev/null
+++ b/docs/guides/version_0.7.2_update_guides.md
@@ -0,0 +1,108 @@
+---
+page_title: "Version Update 0.7.2"
+subcategory: "Upgrade Guides"
+---
+
+# Version 0.7.2
+
+## What's new in 0.7.2
+
+In version `0.7.2` of the Fivetran Terraform provider, the `fivetran_connector` resource is split into two resources:
+- the `fivetran_connector` resource
+- the `fivetran_connector_schedule` resource
+
+With this new structure, it's now possible to create a connector, define the schema config for it, and enable it in one `apply` cycle without intermediate stages.
+Before this version, you had to unpause the connector with an additional `apply` after applying the initial schema configuration, to avoid syncing unneeded data.
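+
+For illustration, here is a minimal sketch of the one-cycle flow (the resource names and the `fivetran_log` service are assumed, not prescribed): the schedule references the schema config, so the connector is only unpaused after the schema settings are applied.
+
+```hcl
+resource "fivetran_connector" "connector" {
+    group_id = fivetran_group.group.id
+    service = "fivetran_log"
+    run_setup_tests = true
+
+    destination_schema {
+        name = "my_fivetran_log_connector"
+    }
+}
+
+resource "fivetran_connector_schema_config" "connector_schema" {
+    connector_id = fivetran_connector.connector.id
+    schema_change_handling = "BLOCK_ALL"
+}
+
+resource "fivetran_connector_schedule" "connector_schedule" {
+    connector_id = fivetran_connector_schema_config.connector_schema.id
+    sync_frequency = "1440"
+    paused = false
+    pause_after_trial = false
+    schedule_type = "auto"
+}
+```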
+
+## Migration guide
+
+### Provider
+
+Update your provider configuration in the following way:
+
+Previous configuration:
+
+```hcl
+required_providers {
+    fivetran = {
+        version = "~> 0.7.1"
+        source = "fivetran/fivetran"
+    }
+}
+```
+
+Updated configuration:
+
+```hcl
+required_providers {
+    fivetran = {
+        version = ">= 0.7.2"
+        source = "fivetran/fivetran"
+    }
+}
+```
+
+### Resource `fivetran_connector`
+
+Update all your connector resources (`fivetran_connector`):
+
+Previous configuration:
+
+```hcl
+resource "fivetran_connector" "test_connector" {
+    group_id = "worker_tennis"
+    service = "fivetran_log"
+
+    destination_schema {
+        name = "fivetran_log_schema"
+    }
+
+    sync_frequency = "1440"
+    daily_sync_time = "6:00"
+    paused = false
+    pause_after_trial = false
+
+    run_setup_tests = true
+    config {
+        group_name = "worker_tennis"
+    }
+}
+```
+
+Updated configuration:
+
+```hcl
+resource "fivetran_connector" "test_connector" {
+    group_id = "worker_tennis"
+    service = "fivetran_log"
+
+    destination_schema {
+        name = "fivetran_log_schema"
+    }
+
+    run_setup_tests = true
+
+    config {
+        group_name = "worker_tennis"
+    }
+}
+
+resource "fivetran_connector_schedule" "test_connector_schedule" {
+    connector_id = fivetran_connector.test_connector.id
+
+    sync_frequency = "1440"
+    daily_sync_time = "6:00"
+    paused = false
+    pause_after_trial = false
+
+    schedule_type = "auto"
+}
+```
+
+### Update terraform state
+
+Once all configurations have been updated, run:
+
+```
+terraform init -upgrade
+```
\ No newline at end of file
diff --git a/docs/guides/version_1.1.18_update_guides.md b/docs/guides/version_1.1.18_update_guides.md
new file mode 100644
index 00000000..2a97c8ef
--- /dev/null
+++ b/docs/guides/version_1.1.18_update_guides.md
@@ -0,0 +1,98 @@
+---
+page_title: "Version Update 1.1.18"
+subcategory: "Upgrade Guides"
+---
+
+# Version 1.1.18
+
+## What's new in 1.1.18
+
+In version `1.1.18` of the Fivetran Terraform provider, the behavior of the `fivetran_connector_schema_config` resource changed:
+- If no column settings are specified in `table.columns`, no settings are applied. If the table is enabled, its columns are no longer blocked automatically by the `BLOCK_ALL` policy.
+- Settings for sub-elements are not managed if the root element is disabled: with the `BLOCK_ALL` policy, no table or column settings are applied for a disabled schema.
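+
+For example, under the new behavior the following sketch (the connector ID and names are assumed) leaves the columns of the enabled `log` table unmanaged, even with the `BLOCK_ALL` policy, because no `columns` settings are specified:
+
+```hcl
+resource "fivetran_connector_schema_config" "example_schema" {
+    connector_id = "connector_id"
+    schema_change_handling = "BLOCK_ALL"
+
+    schemas = {
+        "schema_name" = {
+            enabled = true
+            tables = {
+                "log" = {
+                    # No `columns` block: column settings are not applied,
+                    # and the columns are no longer blocked automatically.
+                    enabled = true
+                }
+            }
+        }
+    }
+}
+```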
+
+## Migration guide
+
+### Provider
+
+Update your provider configuration in the following way:
+
+Previous configuration:
+
+```hcl
+required_providers {
+    fivetran = {
+        version = "~> 1.1.17"
+        source = "fivetran/fivetran"
+    }
+}
+```
+
+Updated configuration:
+
+```hcl
+required_providers {
+    fivetran = {
+        version = ">= 1.1.18"
+        source = "fivetran/fivetran"
+    }
+}
+```
+
+### Resource `fivetran_connector_schema_config`
+
+Update all your connector schema config resources (`fivetran_connector_schema_config`):
+
+Previous configuration:
+
+```hcl
+resource "fivetran_connector_schema_config" "test_schema" {
+    connector_id = "connector_id"
+    schema_change_handling = "ALLOW_ALL"
+
+    schema {
+        name = "schema_name"
+        table {
+            name = "table_name"
+            sync_mode = "HISTORY"
+            column {
+                name = "hashed_column_name"
+                hashed = "true"
+            }
+        }
+    }
+}
+```
+
+Updated configuration:
+
+```hcl
+resource "fivetran_connector_schema_config" "test_schema" {
+    connector_id = "connector_id"
+    schema_change_handling = "ALLOW_ALL"
+
+    schemas = {
+        "schema_name" = {
+            tables = {
+                "table_name" = {
+                    sync_mode = "HISTORY"
+                    columns = {
+                        "hashed_column_name" = {
+                            hashed = true
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
+```
+
+### Update terraform state
+
+Once all configurations have been updated, run:
+
+```
+terraform init -upgrade
+```
\ No newline at end of file
diff --git a/docs/guides/version_1.3.0_update_guides.md b/docs/guides/version_1.3.0_update_guides.md
new file mode 100644
index 00000000..fdca57a1
--- /dev/null
+++ b/docs/guides/version_1.3.0_update_guides.md
@@ -0,0 +1,116 @@
+---
+page_title: "Version Update 1.3.0"
+subcategory: "Upgrade Guides"
+---
+
+# Version 1.3.0
+
+## What's new in 1.3.0
+
+In version `1.3.0` of the Fivetran Terraform provider, the `fivetran_dbt_project` resource behavior changed:
+- the dbt project configuration is now set up in a separate `fivetran_dbt_git_project_config` resource, after the deploy key has been installed in the repository
+
+## Migration guide
+
+### Provider
+
+Update your provider configuration in the following way:
+
+Previous configuration:
+
+```hcl
+required_providers {
+    fivetran = {
+        version = "~> 1.2.8"
+        source = "fivetran/fivetran"
+    }
+}
+```
+
+Updated configuration:
+
+```hcl
+required_providers {
+    fivetran = {
+        version = ">= 1.3.0"
+        source = "fivetran/fivetran"
+    }
+}
+```
+
+### Resource `fivetran_dbt_project`
+
+Update all your dbt project resources (`fivetran_dbt_project`):
+
+Previous configuration:
+
+```hcl
+resource "fivetran_dbt_project" "test_project" {
+    provider = fivetran-provider
+    group_id = fivetran_destination.test_destination.id
+    dbt_version = "1.0.1"
+    threads = 1
+    default_schema = "dbt_demo_test_e2e_terraform"
+    type = "GIT"
+    project_config {
+        folder_path = "/folder/path"
+        git_remote_url = "git@github.com:fivetran/repo-name.git"
+        git_branch = "main"
+    }
+}
+```
+
+Updated configuration:
+
+```hcl
+resource "fivetran_dbt_project" "test_project" {
+    provider = fivetran-provider
+    group_id = fivetran_destination.test_destination.id
+    dbt_version = "1.0.1"
+    threads = 1
+    default_schema = "dbt_demo_test_e2e_terraform"
+    type = "GIT"
+}
+```
+
+For GitHub-based repositories:
+```hcl
+resource "github_repository_deploy_key" "example_repository_deploy_key" {
+    title = "Repository test key"
+    repository = "fivetran/repo-name"
+    key = fivetran_dbt_project.test_project.public_key
+    read_only = true
+}
+```
+
+For Bitbucket-based repositories:
+```hcl
+resource "bitbucket_deploy_key" "test" {
+    workspace = "fivetran"
repository = "repo-name" + key = fivetran_dbt_project.test_project.public_key + label = "Repository test key" +} +``` + +Since we recommend using third-party providers in this case, please make sure that access to the repositories is provided correctly and the providers are configured correctly for connection. + + +```hcl +resource "fivetran_dbt_git_project_config" "test_project_config" { + project_id = fivetran_dbt_project.test_project.id + + folder_path = "/folder/path" + git_remote_url = "git@github.com:fivetran/repo-name.git" + git_branch = "main" +} + +``` + +### Update terraform state + +Once all configurations have been updated, run: + +``` +terraform init -upgrade +``` \ No newline at end of file diff --git a/docs/guides/version_1.4.0_update_guides.md b/docs/guides/version_1.4.0_update_guides.md new file mode 100644 index 00000000..5aa83333 --- /dev/null +++ b/docs/guides/version_1.4.0_update_guides.md @@ -0,0 +1,104 @@ +---- +page_title: "Version Update 1.4.0" +subcategory: "Upgrade Guides" +--- + +# Version 1.4.0 + +## What's new in 1.4.0 + +In version `1.4.0` of Fivetran Terraform provider, resource `fivetran_local_processing_agent` renamed to `fivetran_hybrid_deployment_agent` + +## Migration guide + +### Provider + +Update your provider configuration in the following way: + +Previous configuration: + +```hcl +required_providers { + fivetran = { + version = "~> 1.3.0" + source = "fivetran/fivetran" + } + } +``` + +Updated configuration: + +```hcl +required_providers { + fivetran = { + version = ">= 1.4.0" + source = "fivetran/fivetran" + } + } +``` + +### Resource `fivetran_hybrid_deployment_agent` + +Update all your local processing agent resources (`fivetran_local_processing_agent`): + +Previous configuration: + +```hcl +resource "fivetran_local_processing_agent" "test_agent" { +} +``` + +Updated configuration: + +```hcl +resource "fivetran_hybrid_deployment_agent" "test_agent" { +} +``` + +### Resource `fivetran_connector` + +Update all your connector resources (`fivetran_connector`): + +Previous configuration: + +```hcl +resource "fivetran_connector" "test_connector" { + local_processing_agent_id = agent_id +} +``` + +Updated configuration: + +```hcl +resource "fivetran_connector" "test_connector" { + hybrid_deployment_agent_id = agent_id +} +``` + +### Resource `fivetran_destination` + +Update all your destination resources (`fivetran_destination`): + +Previous configuration: + +```hcl +resource "fivetran_destination" "test_destination" { + local_processing_agent_id = agent_id +} +``` + +Updated configuration: + +```hcl +resource "fivetran_destination" "test_destination" { + hybrid_deployment_agent_id = agent_id +} +``` + +### Update terraform state + +Once all configurations have been updated, run: + +``` +terraform init -upgrade +``` \ No newline at end of file diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 00000000..54712d24 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,51 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "Fivetran Provider" +subcategory: "" +description: |- + +--- + +# fivetran Provider + +## Example Usage + +```terraform +# Terraform 0.13+ uses the Terraform Registry: + +terraform { + required_providers { + fivetran = { + version = ">= 1.0.0" + source = "fivetran/fivetran" + } + } +} + +# Configure the Fivetran provider +provider "fivetran" { +# We recommend to use environment variables instead of explicit assignment +# api_key = var.fivetran_api_key +# api_secret = var.fivetran_api_secret +} 
+
+
+# Terraform 0.12 and earlier can be specified as:
+
+# Configure the Fivetran provider
+# provider "fivetran" {
+#     api_key = "${var.fivetran_api_key}"
+#     api_secret = "${var.fivetran_api_secret}"
+# }
+```
+
+## Schema
+
+### Required
+
+- `api_key` (String)
+- `api_secret` (String, Sensitive)
+
+### Optional
+
+- `api_url` (String)
\ No newline at end of file
diff --git a/docs/resources/connector.md b/docs/resources/connector.md
new file mode 100644
index 00000000..04f939a2
--- /dev/null
+++ b/docs/resources/connector.md
@@ -0,0 +1,3961 @@
+---
+page_title: "Resource: fivetran_connector"
+---
+
+# Resource: fivetran_connector
+
+This resource allows you to create, update, and delete connectors.
+
+## Example Usage
+
+```hcl
+resource "fivetran_connector" "amplitude" {
+    group_id = fivetran_group.group.id
+    service = "amplitude"
+
+    destination_schema {
+        name = "amplitude_connector"
+    }
+
+    config {
+        project_credentials {
+            project = "project1"
+            api_key = "my_api_key"
+            secret_key = "my_secret_key"
+        }
+
+        project_credentials {
+            project = "project2"
+            api_key = "my_api_key"
+            secret_key = "my_secret_key"
+        }
+    }
+}
+```
+
+-> Use `destination_schema` to define connector schema configuration. Field `destination_schema.name` will be mapped into `config.schema` in the REST API payload. Field `destination_schema.table` will be mapped into `config.table` in the REST API payload. Field `destination_schema.prefix` will be mapped into `config.schema_prefix` in the REST API payload. Specify values according to the [public documentation](https://fivetran.com/docs/rest-api/connectors/config) for the particular connector type.
+
+### NOTE: indirect resource dependencies
+
+The connector resource receives the `group_id` parameter value from the group resource, but the destination resource depends on the group resource. When you try to destroy the destination resource infrastructure, the terraform plan is created successfully, but once you run the `terraform apply` command, it returns an error because the Fivetran API doesn't let you delete destinations that have linked connectors. To solve this problem, you should either explicitly define `depends_on` between the connector and destination:

+```hcl
+resource "fivetran_connector" "amplitude" {
+    ...
+    depends_on = [
+        fivetran_destination.my_destination
+    ]
+}
+```
+
+or get the group ID from the destination:
+
+```hcl
+resource "fivetran_connector" "amplitude" {
+    group_id = fivetran_destination.my_destination.group_id
+    ...
+}
+```
+
+
+## Schema
+
+### Required
+
+- `group_id` (String) The unique identifier for the Group (Destination) within the Fivetran system.
+- `service` (String) The connector type ID within the Fivetran system.
+
+### Optional
+
+- `auth` (Block, Optional) (see [below for nested schema](#nestedblock--auth))
+- `config` (Block, Optional) (see [below for nested schema](#nestedblock--config))
+- `data_delay_sensitivity` (String) The level of data delay notification threshold. Possible values: LOW, NORMAL, HIGH, CUSTOM. The default value is NORMAL. CUSTOM is only available for customers using the Enterprise plan or above.
+- `data_delay_threshold` (Number) Custom sync delay notification threshold in minutes. The default value is 0. This parameter is only used when data_delay_sensitivity is set to CUSTOM.
+- `destination_schema` (Block, Optional) (see [below for nested schema](#nestedblock--destination_schema))
+- `hybrid_deployment_agent_id` (String) The hybrid deployment agent ID that refers to the controller created for the group the connection belongs to.
If the value is specified, the system will try to associate the connection with an existing agent.
+- `local_processing_agent_id` (String, Deprecated) (Deprecated) The hybrid deployment agent ID that refers to the controller created for the group the connection belongs to. If the value is specified, the system will try to associate the connection with an existing agent.
+- `networking_method` (String) Possible values: Directly, SshTunnel, ProxyAgent.
+- `private_link_id` (String) The private link ID.
+- `proxy_agent_id` (String) The proxy agent ID.
+- `run_setup_tests` (Boolean) Specifies whether the setup tests should be run automatically. The default value is FALSE.
+- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts))
+- `trust_certificates` (Boolean) Specifies whether we should trust the certificate automatically. The default value is FALSE. If a certificate is not trusted automatically, it has to be approved with [Certificates Management API Approve a destination certificate](https://fivetran.com/docs/rest-api/certificates#approveadestinationcertificate).
+- `trust_fingerprints` (Boolean) Specifies whether we should trust the SSH fingerprint automatically. The default value is FALSE. If a fingerprint is not trusted automatically, it has to be approved with [Certificates Management API Approve a destination fingerprint](https://fivetran.com/docs/rest-api/certificates#approveadestinationfingerprint).
+
+### Read-Only
+
+- `connected_by` (String) The unique identifier of the user who has created the connector in your account.
+- `created_at` (String) The timestamp of the time the connector was created in your account.
+- `id` (String) The unique identifier for the connector within the Fivetran system.
+- `name` (String) The name used both as the connector's name within the Fivetran system and as the source schema's name within your destination.
+
+
+### Nested Schema for `auth`
+
+Optional:
+
+- `access_token` (String, Sensitive) Field usage depends on `service` value: 
+    - Service `autodesk_bim_360`: Your Autodesk BIM 360 Access Token.
+    - Service `azure_sql_db`: The long-lived Access token carries the information necessary to access API resources.
+    - Service `azure_sql_managed_db`: The long-lived Access token carries the information necessary to access API resources.
+    - Service `billing_platform`: Your BillingPlatform access token.
+    - Service `calendly`: Your Calendly access token.
+    - Service `docebo`: Your Docebo Access Token.
+    - Service `drift`: Your Drift access token.
+    - Service `employment_hero`: Your Employment Hero access token.
+    - Service `facebook_ads`: The long-lived `Access token` along with the `client_id` and `client_secret` parameters carry the information necessary to query the Facebook Ads API
+    - Service `facebook_pages`: The `Access Token` carries the information necessary for API resources to fetch data
+    - Service `freshbooks`: Your FreshBooks Access Token.
+    - Service `gitlab`: Your GitLab access token.
+    - Service `google_business_profile`: Your Google Business Profile Access token.
+    - Service `google_calendar`: Your Google Calendar access token.
+    - Service `google_classroom`: The `Access Token` that carries the information necessary for API resources to fetch data.
+    - Service `google_tasks`: The access token that carries the information necessary for API resources to fetch your Google Tasks data.
+ - Service `instagram_business`: The `Access Token` carries the information necessary for API resources to fetch data + - Service `intercom`: The long-lived `Access Token` carries the information necessary for API resources to fetch data. + - Service `medallia`: Your Medallia access token that contains all the information necessary for the API resources to fetch your data. + - Service `pinterest_organic`: Your Pinterest access token. + - Service `ramp`: Your Ramp access token. + - Service `ringcentral`: The long-lived `Access token` carries the information necessary to access API resources. + - Service `shopify`: The Shopify access token. + - Service `slack`: Your Slack access token. + - Service `stripe`: The Stripe API Restricted Key + - Service `stripe_test`: The Stripe API Restricted Key + - Service `survey_monkey`: The long-lived `Access token` carries the information necessary to access API resources. + - Service `tiktok_ads`: The long-lived `Access token` carries the information necessary to access API resources. + - Service `typeform`: The Typeform API access token. + - Service `yahoo_search_ads_yahoo_japan`: Your Yahoo Search Ads Access Token. + - Service `zendesk`: The long-lived `Access token` carries the information necessary to access API resources. + - Service `zendesk_chat`: The long-lived `Access token` carries the information necessary to access API resources. + - Service `zendesk_sell`: The long-lived `Access token` carries the information necessary to access API resources. + - Service `zoom`: Your Zoom Access token. +- `api_key` (String) Field usage depends on `service` value: + - Service `elastic_cloud`: The Elasticsearch API key. If omitted, then basic user and password authentication will apply. + - Service `es_self_hosted`: The Elasticsearch API key. If omitted, then basic user and password authentication will apply. +- `aws_access_key` (String) Field usage depends on `service` value: + - Service `amazon_selling_partner`: `AWS Access Key` of your AWS Account User. +- `aws_secret_key` (String) Field usage depends on `service` value: + - Service `amazon_selling_partner`: `AWS Secret Key` of your AWS Account User. +- `client_access` (Block, Optional) (see [below for nested schema](#nestedblock--auth--client_access)) +- `client_id` (String) Field usage depends on `service` value: + - Service `amazon_selling_partner`: `Client ID` of your Amazon Seller/Vendor Central client application. + - Service `apple_search_ads`: Apple Search Ads REST API Client ID. Must be populated if `is_auth2_enabled` is set to `true`. + - Service `workday`: Client ID + - Service `workday_financial_management`: ID of your Workday Client App + - Service `workday_hcm`: ID of your Workday Client App + - Service `yahoo_dsp`: Your Yahoo DSP Client ID. +- `client_secret` (String) Field usage depends on `service` value: + - Service `amazon_selling_partner`: `Client Secret` of your Amazon Seller/Vendor Central client application. + - Service `workday`: Client Secret + - Service `workday_financial_management`: Secret of your Workday Client App + - Service `workday_hcm`: Secret of your Workday Client App + - Service `yahoo_dsp`: Your Yahoo DSP Client Secret. +- `consumer_key` (String) Field usage depends on `service` value: + - Service `twitter`: API Key of your app + - Service `twitter_ads`: The Twitter App consumer key. +- `consumer_secret` (String) Field usage depends on `service` value: + - Service `twitter`: API Secret of your app + - Service `twitter_ads`: The Twitter App consumer secret. 
+- `key_id` (String) Field usage depends on `service` value: + - Service `apple_search_ads`: Apple Search Ads REST API Key ID. Must be populated if `is_auth2_enabled` is set to `true`. +- `oauth_token` (String) Field usage depends on `service` value: + - Service `twitter`: The Twitter App access token. + - Service `twitter_ads`: The Twitter App access token. +- `oauth_token_secret` (String) Field usage depends on `service` value: + - Service `twitter`: The Twitter App access token secret. + - Service `twitter_ads`: The Twitter App access token secret. +- `ocapi_access_token` (String) +- `ocapi_refresh_token` (String) +- `previous_refresh_token` (String, Sensitive) Field usage depends on `service` value: + - Service `dynamics_365`: Previous `Refresh token` of your application. +- `realm_id` (String) Field usage depends on `service` value: + - Service `quickbooks`: `Realm ID` of your QuickBooks application. +- `refresh_token` (String, Sensitive) Field usage depends on `service` value: + - Service `adroll`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `airtable`: The long-lived refresh token along with the client ID and client secret carry the information necessary to get a new access token for API resources. + - Service `amazon_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `amazon_selling_partner`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `asana`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `autodesk_bim_360`: Your Autodesk BIM 360 Refresh Token. + - Service `azure_service_bus`: The refresh token. Required if the authentication type is `AzureActiveDirectory` + - Service `azure_sql_db`: The long-lived Refresh token carries the information necessary to get a new access token for API resources. + - Service `azure_sql_managed_db`: The long-lived Refresh token carries the information necessary to get a new access token for API resources. + - Service `billing_platform`: Your BillingPlatform refresh token. + - Service `bingads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `business_central`: The long-lived refresh token along with the client ID and client secret parameters carry the information necessary to get a new access token for API resources. + - Service `calendly`: Your Calendly refresh token. + - Service `docebo`: Your Docebo Refresh Token. + - Service `double_click_campaign_manager`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `double_click_publishers`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `drift`: Your Drift refresh token. 
+    - Service `dropbox`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `dynamics_365`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `employment_hero`: Your Employment Hero refresh token.
+    - Service `financial_force`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `freshbooks`: Your FreshBooks Refresh Token.
+    - Service `front`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `gitlab`: Your GitLab refresh token.
+    - Service `google_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `google_analytics`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `google_analytics_4`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `google_analytics_mcf`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `google_business_profile`: Your Google Business Profile Refresh token.
+    - Service `google_calendar`: Your Google Calendar refresh token.
+    - Service `google_classroom`: The long-lived `Refresh token` of your Google Classroom client application.
+    - Service `google_display_and_video_360`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `google_play`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `google_search_ads_360`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `google_sheets`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `google_tasks`: The long-lived refresh token of your Google Tasks client application.
+    - Service `helpscout`: The long-lived `refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `hubspot`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+    - Service `linkedin_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources.
+ - Service `linkedin_company_pages`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `microsoft_lists`: The long-lived Refresh token carries the information necessary to get a new access token for API resources. + - Service `one_drive`: The long-lived `Refresh token` carries the information necessary to get a new access token for API resources. + - Service `optimizely`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `outreach`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `pinterest_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `pinterest_organic`: Your Pinterest refresh token. + - Service `pipedrive`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `qualtrics`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `quickbooks`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `ramp`: Your Ramp refresh token. + - Service `reddit_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `salesforce`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `salesforce_sandbox`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `salesloft`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `share_point`: The long-lived Refresh token carries the information necessary to get a new access token for API resources. + - Service `slack`: Your Slack refresh token. + - Service `snapchat_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `spotify_ads`: The long-lived `Refresh token` along with the `client_id` and `client_secret` parameters carry the information necessary to get a new access token for API resources. + - Service `typeform`: The Typeform API refresh token. + - Service `workday`: OAuth Refresh Token + - Service `yahoo_search_ads_yahoo_japan`: Your Yahoo Search Ads Refresh Token. + - Service `zoho_crm`: The long-lived `Refresh token`, along with the `client_id` and `client_secret` parameters, carries the information necessary to get a new access token for API resources. + - Service `zoom`: Your Zoom Refresh token. 
+- `role_arn` (String) Field usage depends on `service` value: + - Service `amazon_selling_partner`: `IAM Role ARN` of your AWS Account. +- `team_id` (String) Field usage depends on `service` value: + - Service `apple_search_ads`: Apple Search Ads REST API Team ID. Must be populated if `is_auth2_enabled` is set to `true`. +- `user_access_token` (String) Field usage depends on `service` value: + - Service `facebook_ads`: Access Token + + +### Nested Schema for `auth.client_access` + +Optional: + +- `client_id` (String) Field usage depends on `service` value: + - Service `adroll`: `Client ID` of your AdRoll client application. + - Service `airtable`: `Client ID` of your Airtable client application. + - Service `amazon_ads`: `Client ID` of your Amazon Ads client application. + - Service `asana`: `Client ID` of your Asana client application. + - Service `azure_service_bus`: `Client ID` of your Azure application. Required if the authentication type is `AzureActiveDirectory` + - Service `bingads`: `Client ID` of your Microsoft Advertising client application. + - Service `business_central`: `Client ID` of your Airtable client application. + - Service `double_click_campaign_manager`: `Client ID` of your Google Campaign Manager 360 client application. + - Service `double_click_publishers`: `Client ID` of your Google Ad Manager client application. + - Service `dropbox`: `Client ID` of your Dropbox client application. + - Service `dynamics_365`: `Client ID` of your Dynamic 365 client application, or Service Principal. + - Service `facebook_ads`: `Client ID` of your Facebook client application. + - Service `facebook_pages`: `Client ID` of your Facebook client application. + - Service `financial_force`: `Client ID` of your Salesforce client application. + - Service `front`: `Client ID` of your Front client application. + - Service `google_ads`: `Client ID` of your Google Ads client application. + - Service `google_analytics`: `Client ID` of your Google Analytics client application. + - Service `google_analytics_4`: `Client ID` of your Google Analytics client application. + - Service `google_analytics_mcf`: `Client ID` of your Google Analytics client application. + - Service `google_display_and_video_360`: `Client ID` of your Google Display & Video 360 client application. + - Service `google_play`: `Client ID` of your Google Play client application. + - Service `google_search_ads_360`: `Client ID` of your Google Search Ads 360 client application. + - Service `google_search_console`: `Client ID` of your Google Search Console client application. + - Service `google_sheets`: `Client ID` of your Google Sheets client application. + - Service `helpscout`: `Client ID` of your Help Scout client application. + - Service `hubspot`: `Client ID` of your HubSpot client application. + - Service `instagram_business`: `Client ID` of your Facebook client application. + - Service `linkedin_ads`: `Client ID` of your LinkedIn client application. + - Service `linkedin_company_pages`: `Client ID` of your LinkedIn client application. + - Service `microsoft_lists`: `Client ID` of your Microsoft client application. + - Service `one_drive`: `Client ID` of your Microsoft OneDrive client application. + - Service `optimizely`: `Client ID` of your Optimizely client application. + - Service `outreach`: `Client ID` of your Outreach client application. + - Service `pardot`: `Client ID` of your Pardot client application. + - Service `pinterest_ads`: `Client ID` of your Pinterest client application. 
+ - Service `pipedrive`: `Client ID` of your Pipedrive client application. + - Service `qualtrics`: `Client ID` of your Qualtrics client application. + - Service `quickbooks`: `Client ID` of your QuickBooks client application. + - Service `reddit_ads`: `Client ID` of your Reddit Ads client application. + - Service `salesforce`: `Client ID` of your Salesforce client application. + - Service `salesforce_sandbox`: `Client ID` of your Salesforce client application. + - Service `share_point`: `Client ID` of your Microsoft client application. + - Service `snapchat_ads`: `Client ID` of your Snapchat Ads client application. + - Service `spotify_ads`: `Client ID` of your Ad Studio application. + - Service `survey_monkey`: `Client ID` of your SurveyMonkey client application. + - Service `tiktok_ads`: `Client ID` of your TikTok Ads client application. + - Service `twitter`: `Client ID` of your Twitter client application. + - Service `twitter_ads`: `Client ID` of your Twitter Ads client application. + - Service `typeform`: The Typeform client ID. + - Service `yahoo_gemini`: `Client ID` of your Yahoo Gemini client application. + - Service `youtube_analytics`: `Client ID` of your Youtube client application. + - Service `zoho_crm`: `Client ID` of your Zoho client application. +- `client_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `adroll`: `Client Secret` of your AdRoll client application. + - Service `airtable`: `Client Secret` of your Airtable client application. + - Service `amazon_ads`: `Client Secret` of your Amazon Ads client application. + - Service `asana`: `Client Secret` of your Asana client application. + - Service `azure_service_bus`: `Client Secret` of your Azure application. Required if the authentication type is `AzureActiveDirectory` + - Service `bingads`: `Client Secret` of your Microsoft Advertising client application. + - Service `business_central`: `Client Secret` of your Airtable client application. + - Service `double_click_campaign_manager`: `Client Secret` of your Google Campaign Manager 360 client application. + - Service `double_click_publishers`: `Client Secret` of your Google Ad Manager client application. + - Service `dropbox`: `Client Secret` of your Dropbox client application. + - Service `dynamics_365`: `Client Secret` of your Dynamic 365 client application, or Service Principal. + - Service `facebook_ads`: `Client Secret` of your Facebook client application. + - Service `facebook_pages`: `Client Secret` of your Facebook client application. + - Service `financial_force`: `Client Secret` of your Salesforce client application. + - Service `front`: `Client Secret` of your Front client application. + - Service `google_ads`: `Client Secret` of your Google Ads client application. + - Service `google_analytics`: `Client Secret` of your Google Analytics client application. + - Service `google_analytics_4`: `Client Secret` of your Google Analytics client application. + - Service `google_analytics_mcf`: `Client Secret` of your Google Analytics client application. + - Service `google_display_and_video_360`: `Client Secret` of your Google Display & Video 360 client application. + - Service `google_play`: `Client Secret` of your Google Play client application. + - Service `google_search_ads_360`: `Client Secret` of your Google Search Ads 360 client application. + - Service `google_search_console`: `Client Secret` of your Google Search Console client application. + - Service `google_sheets`: `Client Secret` of your Google Sheets client application. 
+ - Service `helpscout`: `Client Secret` of your Help Scout client application. + - Service `hubspot`: `Client Secret` of your HubSpot client application. + - Service `instagram_business`: `Client Secret` of your Facebook client application. + - Service `linkedin_ads`: `Client Secret` of your LinkedIn client application. + - Service `linkedin_company_pages`: `Client Secret` of your LinkedIn client application. + - Service `microsoft_lists`: `Client Secret` of your Microsoft client application. + - Service `one_drive`: `Client Secret` of your Microsoft OneDrive client application. + - Service `optimizely`: `Client Secret` of your Optimizely client application. + - Service `outreach`: `Client Secret` of your Outreach client application. + - Service `pardot`: `Client Secret` of your Pardot client application. + - Service `pinterest_ads`: `Client Secret` of your Pinterest client application. + - Service `pipedrive`: `Client Secret` of your Pipedrive client application. + - Service `qualtrics`: `Client Secret` of your Qualtrics client application. + - Service `quickbooks`: `Client Secret` of your QuickBooks client application. + - Service `reddit_ads`: `Client Secret` of your Reddit Ads client application. + - Service `salesforce`: `Client Secret` of your Salesforce client application. + - Service `salesforce_sandbox`: `Client Secret` of your Salesforce client application. + - Service `share_point`: `Client Secret` of your Microsoft client application. + - Service `snapchat_ads`: `Client Secret` of your Snapchat Ads client application. + - Service `spotify_ads`: `Client Secret` of your Ad Studio application. + - Service `survey_monkey`: `Client Secret` of your SurveyMonkey client application. + - Service `tiktok_ads`: `Client Secret` of your TikTok Ads client application. + - Service `twitter`: `Client Secret` of your Twitter client application. + - Service `twitter_ads`: `Client Secret` of your Twitter Ads client application. + - Service `typeform`: The Typeform client secret. + - Service `yahoo_gemini`: `Client Secret` of your Yahoo Gemini client application. + - Service `youtube_analytics`: `Client Secret` of your Youtube client application. + - Service `zoho_crm`: `Client Secret` of your Zoho client application. +- `developer_token` (String) Field usage depends on `service` value: + - Service `google_ads`: Your approved `Developer token` to connect to the Google Ads API. +- `user_agent` (String) Field usage depends on `service` value: + - Service `google_ads`: Your company's name in your Google Ads client application. + + + + +### Nested Schema for `config` + +Optional: + +- `abs_connection_method` (String) Field usage depends on `service` value: + - Service `adobe_analytics_data_feed`: Azure Blob Storage connection method +- `abs_connection_string` (String, Sensitive) Field usage depends on `service` value: + - Service `adobe_analytics_data_feed`: Azure Blob Storage connection string. + - Service `braze`: Connection String +- `abs_container_address` (String) Field usage depends on `service` value: + - Service `adobe_analytics_data_feed`: Azure Blob Storage container address +- `abs_container_name` (String) Field usage depends on `service` value: + - Service `adobe_analytics_data_feed`: Azure Blob Storage container name. 
+ - Service `braze`: Container Name +- `abs_host_ip` (String) Field usage depends on `service` value: + - Service `adobe_analytics_data_feed`: Azure Blob Storage host IP +- `abs_host_user` (String) Field usage depends on `service` value: + - Service `adobe_analytics_data_feed`: Azure Blob Storage username +- `abs_prefix` (String) Field usage depends on `service` value: + - Service `braze`: Prefix +- `abs_public_key` (String) Field usage depends on `service` value: + - Service `adobe_analytics_data_feed`: Azure Blob Storage public key +- `academy_id` (String) Field usage depends on `service` value: + - Service `workramp`: Your WorkRamp academy ID. +- `access_id` (String) Field usage depends on `service` value: + - Service `planful`: Your Planful access ID. +- `access_key` (String, Sensitive) Field usage depends on `service` value: + - Service `gainsight_customer_success`: The access key for API authentication. + - Service `gongio`: Your Gongio Access key. + - Service `planful`: Your Planful access key. + - Service `retailnext`: Your RetailNext access key. +- `access_key_id` (String, Sensitive) Field usage depends on `service` value: + - Service `appsflyer`: Your AWS access key ID. + - Service `aws_cost_report`: Access Key ID + - Service `checkout`: Your Checkout.com access key ID. + - Service `s3`: Access Key ID + - Service `wasabi_cloud_storage`: Access Key ID +- `access_key_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `aws_cost_report`: Access Key Secret + - Service `checkout`: Your Checkout.com access key secret. + - Service `gongio`: Your Gongio Access Key Secret. + - Service `s3`: Access Key Secret + - Service `wasabi_cloud_storage`: Access Key Secret +- `access_token` (String, Sensitive) Field usage depends on `service` value: + - Service `7shifts`: Your 7shifts access token. + - Service `attio`: Your Attio bearer token + - Service `big_commerce`: API access token of your store. + - Service `bitly`: Your Bitly access token. + - Service `calabrio`: Your Calabrio access token. + - Service `coupa`: Your Coupa access token fetched using client_id and client_secret + - Service `deputy`: Your Deputy API access token. + - Service `getfeedback`: Your GetFeedback Access token. + - Service `gocardless`: Your GoCardless API token. + - Service `ironclad`: Your Ironclad access token. + - Service `kustomer`: Your Kustomer API key. + - Service `lattice`: Your Lattice API access token. + - Service `launchdarkly`: Your LaunchDarkly access token. + - Service `nylas`: Your Nylas access_token. + - Service `planhat`: Your Planhat access token. + - Service `rollbar`: Your Rollbar Access Token. + - Service `samsara`: Your Samsara API token. + - Service `slab`: Your Slab API key. + - Service `talkwalker`: Your Talkwalker access token. + - Service `workable`: Your Workable Access Token. + - Service `workramp`: Your WorkRamp access token. +- `access_type` (String) Field usage depends on `service` value: + - Service `share_point`: Access Type +- `account` (String) Field usage depends on `service` value: + - Service `netsuite_suiteanalytics`: The NetSuite Account ID. + - Service `ordway`: Your Ordway account type. +- `account_access_token` (String, Sensitive) Field usage depends on `service` value: + - Service `rollbar`: Your Rollbar account access token. +- `account_id` (String) Field usage depends on `service` value: + - Service `appcues`: Your Appcues Account ID. + - Service `brightcove`: Your Brightcove account ID. + - Service `cin7core`: Your Cin7 Core account ID. 
+ - Service `dear`: Your Dear Account ID. + - Service `harvest`: Your Harvest Account ID. + - Service `optimizely`: Your Optimizely account ID. + - Service `udemy_business`: Your Udemy Business account ID. +- `account_ids` (Set of String) Field usage depends on `service` value: + - Service `taboola`: Specific Account IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`. +- `account_key` (String, Sensitive) Field usage depends on `service` value: + - Service `cosmos`: The read-only primary or secondary account key for the database account. Required for the `ACCOUNT_KEY` data access method. + - Service `simplesat`: Your Simplesat account key. +- `account_name` (String) Field usage depends on `service` value: + - Service `eventsforce`: Your Eventsforce account name. + - Service `freshdesk_contact_center`: Your Freshdesk Contact Center account name. + - Service `happyfox`: Your HappyFox account name. + - Service `maxio_saasoptics`: Your Maxio SaaSOptics account name. + - Service `talkdesk`: Your Talkdesk Account Name. + - Service `udemy_business`: Your Udemy Business account name. +- `account_plan` (String) Field usage depends on `service` value: + - Service `tymeshift`: Your Tymeshift account plan. +- `account_region` (String) Field usage depends on `service` value: + - Service `iterable`: If your Iterable account URL starts with `https://app.eu.iterable.com` then provide `EU` else `US` +- `account_sid` (String) Field usage depends on `service` value: + - Service `fone_dynamics`: Your Fone Dynamics account SID. +- `account_sync_mode` (String) Field usage depends on `service` value: + - Service `itunes_connect`: Account Sync Mode +- `account_token` (String, Sensitive) Field usage depends on `service` value: + - Service `konnect_insights`: Your Konnect Insights Account Token. +- `account_type` (String) Field usage depends on `service` value: + - Service `freightview`: Your Freightview Account Type. +- `accounts` (Set of String) Field usage depends on `service` value: + - Service `bingads`: Specific accounts to sync. Must be populated if `syncMode` is set to `SpecificAccounts`. + - Service `facebook`: List of accounts of which connector will sync the data. + - Service `facebook_ad_account`: Specific accounts to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`. + - Service `facebook_ads`: List of accounts of which connector will sync the data. + - Service `google_ads`: The list of Account IDs to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`. + - Service `google_analytics`: The list of specific Account IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`. + - Service `google_analytics_4`: The list of specific Account IDs to sync. Must be populated if `sync_mode` is set to `SPECIFIC_ACCOUNTS`. + - Service `google_analytics_mcf`: Specific Account IDs to sync. Must be populated if `sync_mode` is set to `SPECIFIC_ACCOUNTS`. + - Service `google_search_ads_360`: Specific accounts to sync. Must be populated if `accountsSyncMode` is set to `SPECIFIC_ACCOUNTS`. + - Service `instagram_business`: Specific accounts to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`. + - Service `itunes_connect`: Accounts + - Service `linkedin_ads`: Specific Account IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`. + - Service `spotify_ads`: The list of Ad Account IDs to sync. Must be populated if `sync_mode` is set to `SPECIFIC_ACCOUNTS`. + - Service `tiktok_ads`: Specific accounts to sync. 
Must be populated if `sync_mode` is set to `SpecificAccounts`.
+ - Service `twilio`: Specific Accounts to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`.
+ - Service `twitter`: Specific accounts to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`.
+ - Service `twitter_ads`: Specific Accounts to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`.
+- `accounts_reddit_ads` (Block Set) (see [below for nested schema](#nestedblock--config--accounts_reddit_ads))
+- `accounts_sync_mode` (String) Field usage depends on `service` value:
+ - Service `google_search_ads_360`: Whether to sync all accounts or specific ones.
+- `action_breakdowns` (Set of String) Field usage depends on `service` value:
+ - Service `facebook`: List of action_breakdowns that the connector will sync. [Possible action_breakdowns values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#actionbreakdowns).
+- `action_report_time` (String) Field usage depends on `service` value:
+ - Service `facebook`: The report time of action stats. [Possible action_report_time values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#actionreporttime).
+- `ad_analytics` (String) Field usage depends on `service` value:
+ - Service `linkedin_ads`: Whether to sync all analytics reports or specific ones. Default value: `AllReports`
+- `ad_unit_view` (String) Field usage depends on `service` value:
+ - Service `double_click_publishers`: Ad unit view for the report.
+- `admin_api_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `splitio`: Your Split admin API key.
+- `adobe_analytics_configurations` (Block Set) (see [below for nested schema](#nestedblock--config--adobe_analytics_configurations))
+- `advertisables` (Set of String) Field usage depends on `service` value:
+ - Service `adroll`: Specific advertisables to sync. Must be populated if `sync_mode` is set to `SpecificAdvertisables`.
+- `advertisers` (Set of String) Field usage depends on `service` value:
+ - Service `google_display_and_video_360`: The list of advertisers to include in a sync. This parameter only takes effect when `config_method` is set to `CREATE_NEW`.
+ - Service `google_search_ads_360`: Specific advertisers to sync. Must be populated if `advertisersSyncMode` is set to `SPECIFIC_ADVERTISERS`.
+ - Service `pinterest_ads`: Specific Advertisers to sync. Must be populated if `sync_mode` is set to `SpecificAdvertisers`.
+- `advertisers_id` (Set of String) Field usage depends on `service` value:
+ - Service `yahoo_gemini`: Specific Advertiser IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`.
+- `advertisers_sync_mode` (String) Field usage depends on `service` value:
+ - Service `google_search_ads_360`: Whether to sync all advertisers or specific ones.
+- `advertisers_with_seat` (Set of String) Field usage depends on `service` value:
+ - Service `yahoo_dsp`: Specific Advertisers to sync. Must be populated if `sync_mode_advertiser` is set to `SPECIFIC_ADVERTISERS`. Pay attention to the format: `AdvertiserId:SeatId`
+- `agent_config_method` (String)
+- `agent_host` (String) Field usage depends on `service` value:
+ - Service `db2i_hva`: The agent host.
+ - Service `db2i_sap_hva`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
+ - Service `hana_sap_hva_b1`: The host of the agent. 
This is the same as the database host, since the agent must be installed on the same machine as the source database.
+ - Service `hana_sap_hva_ecc`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
+ - Service `hana_sap_hva_ecc_netweaver`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
+ - Service `hana_sap_hva_s4`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
+ - Service `hana_sap_hva_s4_netweaver`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
+ - Service `oracle_hva`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
+ - Service `oracle_sap_hva`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
+ - Service `oracle_sap_hva_netweaver`: The host of the agent. This is the same as the database host, since the agent must be installed on the same machine as the source database.
+ - Service `sql_server_hva`: The host address of the machine running the agent. Often the same as the DB host.
+ - Service `sql_server_sap_ecc_hva`: The host address of the machine running the agent. Often the same as the DB host.
+- `agent_ora_home` (String) Field usage depends on `service` value:
+ - Service `oracle_hva`: The home directory of the Oracle database.
+ - Service `oracle_sap_hva`: The home directory of the Oracle database.
+- `agent_password` (String, Sensitive) Field usage depends on `service` value:
+ - Service `db2i_hva`: The agent password.
+ - Service `db2i_sap_hva`: The agent user's password. It must have a minimum length of 10 characters.
+ - Service `hana_sap_hva_b1`: The agent user's password. It must have a minimum length of 10 characters.
+ - Service `hana_sap_hva_ecc`: The agent user's password. It must have a minimum length of 10 characters.
+ - Service `hana_sap_hva_ecc_netweaver`: The agent user's password. It must have a minimum length of 10 characters.
+ - Service `hana_sap_hva_s4`: The agent user's password. It must have a minimum length of 10 characters.
+ - Service `hana_sap_hva_s4_netweaver`: The agent user's password. It must have a minimum length of 10 characters.
+ - Service `oracle_hva`: The agent user's password. It must have a minimum length of 10 characters.
+ - Service `oracle_sap_hva`: The agent user's password. It must have a minimum length of 10 characters.
+ - Service `oracle_sap_hva_netweaver`: The agent user's password. It must have a minimum length of 10 characters.
+ - Service `sql_server_hva`: The password for the agent user.
+ - Service `sql_server_sap_ecc_hva`: The password for the agent user.
+- `agent_port` (Number) Field usage depends on `service` value:
+ - Service `db2i_hva`: The agent port.
+ - Service `db2i_sap_hva`: The port number of the agent.
+ - Service `hana_sap_hva_b1`: The port number of the agent.
+ - Service `hana_sap_hva_ecc`: The port number of the agent.
+ - Service `hana_sap_hva_ecc_netweaver`: The port number of the agent.
+ - Service `hana_sap_hva_s4`: The port number of the agent.
+ - Service `hana_sap_hva_s4_netweaver`: The port number of the agent. 
+ - Service `oracle_hva`: The port number of the agent. + - Service `oracle_sap_hva`: The port number of the agent. + - Service `oracle_sap_hva_netweaver`: The port number of the agent. + - Service `sql_server_hva`: The port that the agent has open for Fivetran's connection. Default value is 4343. + - Service `sql_server_sap_ecc_hva`: The port that the agent has open for Fivetran's connection. Default value is 4343. +- `agent_public_cert` (String) Field usage depends on `service` value: + - Service `db2i_hva`: The public certificate for the agent. + - Service `db2i_sap_hva`: The agent public certificate. + - Service `hana_sap_hva_b1`: The agent public certificate. + - Service `hana_sap_hva_ecc`: The agent public certificate. + - Service `hana_sap_hva_ecc_netweaver`: The agent public certificate. + - Service `hana_sap_hva_s4`: The agent public certificate. + - Service `hana_sap_hva_s4_netweaver`: The agent public certificate. + - Service `oracle_hva`: The agent public certificate. + - Service `oracle_sap_hva`: The agent public certificate. + - Service `oracle_sap_hva_netweaver`: The agent public certificate. + - Service `sql_server_hva`: The public certificate generated by the agent. + - Service `sql_server_sap_ecc_hva`: The public certificate generated by the agent. +- `agent_user` (String) Field usage depends on `service` value: + - Service `db2i_hva`: The agent user name. + - Service `db2i_sap_hva`: The agent's user. + - Service `hana_sap_hva_b1`: The agent's user. + - Service `hana_sap_hva_ecc`: The agent's user. + - Service `hana_sap_hva_ecc_netweaver`: The agent's user. + - Service `hana_sap_hva_s4`: The agent's user. + - Service `hana_sap_hva_s4_netweaver`: The agent's user. + - Service `oracle_hva`: The agent's user. + - Service `oracle_sap_hva`: The agent's user. + - Service `oracle_sap_hva_netweaver`: The agent's user. + - Service `sql_server_hva`: The agent's username. + - Service `sql_server_sap_ecc_hva`: The agent's username. +- `aggregation` (String) Field usage depends on `service` value: + - Service `facebook`: Options to select aggregation duration. [Possible aggregation values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#aggregation). +- `agreement_grant_token` (String, Sensitive) Field usage depends on `service` value: + - Service `economic`: Your E-conomic X-AgreementGrantToken goes here. +- `always_encrypted` (Boolean) Field usage depends on `service` value: + - Service `aurora`: Require TLS through Tunnel + - Service `aurora_postgres`: Require TLS through Tunnel + - Service `azure_cosmos_for_mongo`: Require TLS + - Service `azure_postgres`: Require TLS through Tunnel + - Service `azure_sql_db`: Require TLS through Tunnel. + - Service `azure_sql_managed_db`: Require TLS. + - Service `clarity`: Require TLS through Tunnel. + - Service `cockroachdb`: Require TLS + - Service `db2i_hva`: Require TLS through Tunnel + - Service `db2i_sap_hva`: Require TLS through Tunnel + - Service `documentdb`: Require TLS encryption. + - Service `dynamics_365_fo`: Require TLS through Tunnel. + - Service `ehr`: Require TLS through Tunnel. + - Service `elastic_cloud`: Default value: true. Set to false if TLS is not required when using an SSH tunnel. + - Service `es_self_hosted`: Default value: true. Set to false if TLS is not required when using an SSH tunnel. + - Service `google_cloud_mysql`: Require TLS through Tunnel + - Service `google_cloud_postgresql`: Require TLS through Tunnel + - Service `google_cloud_sqlserver`: Require TLS. 
+ - Service `heroku_postgres`: Require TLS through Tunnel + - Service `magento_mysql`: Require TLS through Tunnel + - Service `magento_mysql_rds`: Require TLS through Tunnel + - Service `maria`: Require TLS through Tunnel + - Service `maria_azure`: Require TLS through Tunnel + - Service `maria_rds`: Require TLS through Tunnel + - Service `mongo`: Require TLS + - Service `mongo_sharded`: Require TLS through Tunnel + - Service `mysql`: Require TLS through Tunnel + - Service `mysql_azure`: Require TLS through Tunnel + - Service `mysql_rds`: Require TLS through Tunnel + - Service `opendistro`: Default value: true. Set to false if TLS is not required when using an SSH tunnel. + - Service `opensearch`: Default value: true. Set to false if TLS is not required when using an SSH tunnel. + - Service `oracle`: Require TLS through Tunnel + - Service `oracle_ebs`: Require TLS through Tunnel + - Service `oracle_hva`: Require TLS through Tunnel + - Service `oracle_rac`: Require TLS through Tunnel + - Service `oracle_rds`: Require TLS through Tunnel + - Service `oracle_sap_hva`: Require TLS through Tunnel + - Service `oracle_sap_hva_netweaver`: Require TLS. + - Service `postgres`: Require TLS through Tunnel + - Service `postgres_rds`: Require TLS through Tunnel + - Service `sql_server`: Require TLS. + - Service `sql_server_hva`: Require TLS. + - Service `sql_server_rds`: Require TLS. + - Service `sql_server_sap_ecc_hva`: Require TLS. +- `api` (String) Field usage depends on `service` value: + - Service `freshsuccess`: Set this parameter to `api`. +- `api_access_token` (String, Sensitive) Field usage depends on `service` value: + - Service `shopify`: API access token of your custom app. + - Service `square`: The Square API access token of your application. +- `api_environment` (String) Field usage depends on `service` value: + - Service `afterpay`: Your Afterpay API environment. +- `api_id` (String) Field usage depends on `service` value: + - Service `aircall`: Your Aircall API ID. +- `api_key` (String, Sensitive) Field usage depends on `service` value: + - Service `15five`: Your 15five API key. + - Service `360learning`: Your 360Learning API Key. + - Service `6sense`: Your 6sense API Key. + - Service `activecampaign`: Your ActiveCampaign API key. + - Service `affinity`: Your Affinity API key. + - Service `airtable`: API key of the Airtable account. + - Service `algolia`: Your Algolia API key. + - Service `anvyl`: Your Anvyl API key. + - Service `appcues`: Your Appcues API key. + - Service `assembled`: Your Assembled API key. + - Service `atlassian_jira_align`: Your Jira Align API key. + - Service `atlassian_ops_genie`: Your Opsgenie API key + - Service `attentive`: Your Attentive API key. + - Service `aumni`: Your Aumni API key. + - Service `avantlink`: Your AvantLink API key. + - Service `ballotready`: Your BallotReady API token. + - Service `bamboohr`: Your API Key. + - Service `bazaarvoice`: Your Bazaarvoice API key. + - Service `betterworks`: Your Betterworks API key. + - Service `bizzabo`: Your Bizzabo API key. + - Service `brave_ads`: Your Brave Ads API key + - Service `braze`: Your Braze API Key. + - Service `brevo`: Your Brevo API key. + - Service `bubble`: Your Bubble API token. + - Service `buildium`: Your Buildium private API key. + - Service `callrail`: Your CallRail API key. + - Service `campaignmonitor`: Your Campaign Monitor API key. + - Service `canny`: Your Canny API key. + - Service `chargebee_product_catalog_1`: Your Chargebee Product Catalog 1 API key. 
+ - Service `chargebee_product_catalog_2`: Your Chargebee API key. + - Service `chartmogul`: Your ChartMogul API key. + - Service `chorusai`: Your Chorus API key. + - Service `churnkey`: Your Churnkey API Key. + - Service `churnzero`: Your ChurnZero API key. + - Service `cimis`: Your Cimis API key. + - Service `circleci`: Your CircleCI API Key. + - Service `clickup`: Your ClickUp API key. + - Service `close`: Your Close API key. + - Service `cloudbeds`: Your Cloudbeds API key. + - Service `clubspeed`: Your Clubspeed API key. + - Service `coassemble`: Your Coassemble API key. + - Service `codefresh`: Your Codefresh API Key. + - Service `column`: Your Column API key. + - Service `concord`: Your Concord API key. + - Service `confluent_cloud`: API Key + - Service `contrast_security`: Your Contrast Security API Key. + - Service `copper`: Your Copper API key. + - Service `coupa`: Your Coupa API key. + - Service `datadog`: Your Datadog API key. + - Service `dbt_cloud`: Your dbt Cloud service token. + - Service `dcl_logistics`: Your DCL Logistics API key. + - Service `delighted`: API Key for your Delighted account + - Service `destini`: Your Destini API Key. + - Service `donus`: Your Donus API key. + - Service `doorloop`: Your DoorLoop API key. + - Service `drata`: Your Drata API Key. + - Service `dropbox_sign`: Your Dropbox Sign API key. + - Service `duoplane`: Your Duoplane API key. + - Service `easypost`: Your EasyPost API Key. + - Service `electronic_tenant_solutions`: Your Electronic Tenant Solutions API key. + - Service `eventsforce`: Your Eventsforce API secret key. + - Service `everhour`: Your Everhour API Token. + - Service `factorial`: Your Factorial API key. + - Service `firehydrant`: Your FireHydrant API key. + - Service `float`: Your Float API key. + - Service `forj_community`: Your Forj Community API key. + - Service `fourkites`: Your FourKites API key. + - Service `freightview`: Your Freightview API key. + - Service `freshdesk`: Your Freshdesk API Key. + - Service `freshdesk_contact_center`: Your Freshdesk Contact Center API key. + - Service `freshsales`: Your Freshsales API key. + - Service `freshservice`: Your Freshservice API Key. + - Service `freshsuccess`: Your Freshsuccess API key. + - Service `freshteam`: Your Freshteam API key. + - Service `friendbuy`: Your Friendbuy API key. + - Service `fullstory`: Your Fullstory API key. + - Service `gainsight_product_experience`: Your Gainsight Product Experience API key. + - Service `gem`: Your Gem API key. + - Service `gorgias`: Your Gorgias API key. + - Service `greenhouse`: Your Greenhouse API key. + - Service `grepsr`: Your Grepsr API Key. + - Service `grin`: Your Grin API key. + - Service `happyfox`: Your HappyFox API key. + - Service `height`: Your Height API key. + - Service `helpshift`: Your Helpshift API Key. + - Service `incidentio`: Your incident.io API key. + - Service `infobip`: Your Infobip API key. + - Service `insightly`: Your Insightly API key. + - Service `integrate`: Your Integrate API key. + - Service `invoiced`: Your Invoiced API key. + - Service `iterable`: Your Iterable API key. + - Service `ivanti`: Your Ivanti API Key. + - Service `jotform`: Your Jotform API key. + - Service `justcall`: Your JustCall API key. + - Service `katana`: Your Katana API key. + - Service `kevel`: Your Kevel API key. + - Service `keypay`: Your KeyPay API key. + - Service `kisi`: Your Kisi API key. + - Service `klaviyo`: Your Klaviyo API key. + - Service `learnupon`: Your Learnupon API key. + - Service `lemlist`: Your Lemlist API key. 
+ - Service `lever`: Your Lever API key. + - Service `liftoff`: Your Liftoff API key. + - Service `linear`: Your Linear API key. + - Service `linksquares`: Your LinkSquares API key. + - Service `lob`: Your Lob API key. + - Service `loop`: Your Loop API key. + - Service `luma`: Your Luma API key. + - Service `mailgun`: Your Mailgun API key. + - Service `mambu`: Your Mambu API key. + - Service `mandrill`: Your Mandrill API key. + - Service `maxio_chargify`: Enter Your API Key. + - Service `messagebird`: Your MessageBird API key. + - Service `mountain`: Your MNTN API key. + - Service `myosh`: Your myosh API key. + - Service `okendo`: Your Okendo API key. + - Service `ometria`: Your Ometria API Key. + - Service `oncehub`: Your OnceHub API key. + - Service `ordway`: Your Ordway API key. + - Service `ortto`: Your Ortto API key. + - Service `pagerduty`: Your PagerDuty API key. + - Service `papershift`: Your Papershift API Key + - Service `partnerize`: Your Partnerize user API key. + - Service `persona`: Your Persona API key. + - Service `picqer`: Your Picqer API key. + - Service `pinpoint`: Your Pinpoint API key. + - Service `pipe17`: The Pipe17 API key. + - Service `placerai`: Your Placer.ai API key. + - Service `playvox`: Your Playvox API Key. + - Service `posthog`: Your PostHog API key. + - Service `prive`: Your Prive API key. + - Service `qualaroo`: Your Qualaroo API Key. + - Service `quorum`: Your Quorum API key. + - Service `rebound_returns`: Your ReBound Returns API key. + - Service `recurly`: The Recurly API key. + - Service `replyio`: Your Reply API key. + - Service `revenuecat`: Your RevenueCat API key. + - Service `reviewsio`: Your REVIEWS.io API key. + - Service `revops`: Your RevOps bearer token. + - Service `ricochet360`: Your Ricochet360 API key. + - Service `ringover`: Your Ringover API key. + - Service `rippling`: Your Rippling API key. + - Service `rocketlane`: Your Rocketlane API key. + - Service `rootly`: Your Rootly API key. + - Service `safebase`: Your SafeBase API key. + - Service `sage_hr`: Your Sage HR API key. + - Service `sailthru`: The Sailthru API key. + - Service `salsify`: Your Salsify API Key. + - Service `security_journey`: Your Security Journey API key. + - Service `sendgrid`: The SendGrid API key. + - Service `sendinblue`: Your Sendinblue API key. + - Service `shortcut`: Your Shortcut API token. + - Service `shortio`: Your Short.io API key. + - Service `simplesat`: Your Simplesat API key. + - Service `sistrix`: Your SISTRIX API key. + - Service `skilljar`: Your Skilljar API key. + - Service `smartwaiver`: Your Smartwaiver API key. + - Service `snyk`: Your Snyk API key. + - Service `sonarqube`: Your Sonarqube API key. + - Service `sparkpost`: Your SparkPost API key. + - Service `squarespace`: Your Squarespace API key. + - Service `stackadapt`: Your StackAdapt API key. + - Service `statuspage`: Your Statuspage API Key. + - Service `stripe`: Restricted API key + - Service `stripe_test`: Restricted API key + - Service `subscript`: Your Subscript API key. + - Service `survicate`: Your Survicate API Key. + - Service `teads`: Your Teads API key. + - Service `teamtailor`: Your Teamtailor API key. + - Service `testrail`: Your TestRail API key. + - Service `ticket_tailor`: Your Ticket Tailor API key. + - Service `transcend`: Your Transcend API Key. + - Service `trello`: Your TRELLO api key. + - Service `uppromote`: Your UpPromote API key. + - Service `veeqo`: Your Veeqo API key. + - Service `visit_by_ges`: Your Visit by GES API key. 
+ - Service `vitally`: Your Vitally API key. + - Service `vonage`: Your Vonage API Key. + - Service `vts`: Your VTS API key. + - Service `webconnex`: Your Webconnex API key. + - Service `xsolla`: Your Xsolla API key. + - Service `yougov_sport`: Your Yougov Sport API key. + - Service `zingtree`: Your Zingtree API key. +- `api_key_api_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `revel`: Your Revel Systems API Key and API Secret. +- `api_keys` (Set of String, Sensitive) Field usage depends on `service` value: + - Service `mandrill`: Comma-separated list of API keys. Required if `use_api_keys` is set to `true`. +- `api_password` (String, Sensitive) Field usage depends on `service` value: + - Service `duoplane`: Your Duoplane API password. +- `api_quota` (Number) Field usage depends on `service` value: + - Service `marketo`: Allowed number of API requests to Marketo instance per day, the default value is 10000. +- `api_requests_per_minute` (Number) Field usage depends on `service` value: + - Service `qualtrics`: Allowed number of API requests to Qualtrics per minute, the default value is 2000. Maximum allowed number is 3000 because brands may make up to 3000 API requests per minute across all of its API calls. +- `api_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `confluent_cloud`: API Secret + - Service `forj_community`: Your Forj Community API secret. + - Service `friendbuy`: Your Friendbuy API secret. + - Service `justcall`: Your JustCall API Secret. + - Service `liftoff`: Your Liftoff API secret. + - Service `mixpanel`: Mixpanel API Secret. + - Service `qualaroo`: Your Qualaroo API Secret. + - Service `sailthru`: The Sailthru API secret. + - Service `vonage`: Your Vonage API Secret. +- `api_secret_key` (String, Sensitive) Field usage depends on `service` value: + - Service `alchemer`: Your Alchemer API Secret key. +- `api_server` (String) Field usage depends on `service` value: + - Service `sigma_computing_source`: Your Sigma Computing api server. +- `api_token` (String, Sensitive) Field usage depends on `service` value: + - Service `aha`: Your Aha! API key. + - Service `aircall`: Your Aircall API Token. + - Service `appsflyer`: API Token for AppsFlyer's PULL API. + - Service `awin`: Your Awin API Token. + - Service `brex`: Your Brex API token + - Service `buildkite`: Your Buildkite API token. + - Service `buzzsprout`: Your Buzzsprout API token. + - Service `centra`: Your Centra API Token. + - Service `chameleon`: Your Chameleon API token. + - Service `clari`: Your Clari API token. + - Service `confluence`: The Confluence API token. + - Service `dixa`: Your Dixa API token. + - Service `drip`: Your Drip API Token. + - Service `factbird`: Your Factbird API token. + - Service `fone_dynamics`: Your Fone Dynamics API token. + - Service `fountain`: Your Fountain API token. + - Service `g2`: Your G2 API token. + - Service `gladly`: Your Gladly API Token. + - Service `hibob`: Your Hibob API token. + - Service `kandji`: Your Kandji API token. + - Service `livechat`: Your Livechat Access token. + - Service `livechat_partner`: Your Livechat Partner API Token. + - Service `maxio_saasoptics`: Your Maxio SaaSOptics API token. + - Service `megaphone`: Your Megaphone API token. + - Service `missive`: Your Missive API token. + - Service `mixmax`: Mixmax API token. + - Service `okta`: Your Okta API token. + - Service `ordway`: Your Ordway API token. 
+ - Service `pipedrive`: (Optional) Your Pipedrive personal API token
+ - Service `pivotal_tracker`: Pivotal Tracker API token.
+ - Service `postmark`: Your Postmark account API token.
+ - Service `productive`: Your Productive API token.
+ - Service `qualtrics`: API token of the Qualtrics account.
+ - Service `rakutenadvertising`: Your Rakuten Advertising API token.
+ - Service `recharge`: The Recharge API token.
+ - Service `referralhero`: Your Referralhero API token.
+ - Service `resource_management_by_smartsheet`: Your Resource Management by Smartsheet API token.
+ - Service `retently`: Your Retently API token.
+ - Service `rundeck`: Your Rundeck API token.
+ - Service `safetyculture`: Your SafetyCulture API token.
+ - Service `sensor_tower`: Your Sensor Tower API token.
+ - Service `simplecast`: Your Simplecast API token.
+ - Service `snyk`: Your Snyk API token.
+ - Service `textus`: Your TextUs API token.
+ - Service `toggl_track`: Your Toggl Track API token
+ - Service `trello`: Your Trello API token.
+ - Service `trisolute`: Your Trisolute API token.
+ - Service `vwo`: Your VWO API token.
+ - Service `web_scraper`: Your Web Scraper API token.
+ - Service `zendesk`: Zendesk API tokens are auto-generated passwords in the Support admin interface.
+ - Service `zendesk_sunshine`: Zendesk API tokens are auto-generated passwords in the Support admin interface.
+- `api_type` (String)
+- `api_url` (String) Field usage depends on `service` value:
+ - Service `braze`: Your Braze API URL.
+- `api_usage` (String) Field usage depends on `service` value:
+ - Service `zendesk`: Maximum Zendesk API usage allowed
+- `api_user_identifier` (String, Sensitive) Field usage depends on `service` value:
+ - Service `shipnetwork`: Your ShipNetwork API user identifier.
+- `api_user_secret` (String, Sensitive) Field usage depends on `service` value:
+ - Service `shipnetwork`: Your ShipNetwork API user secret.
+- `api_utilization_percentage` (String) Field usage depends on `service` value:
+ - Service `kustomer`: API Utilization Percentage
+- `api_version` (String) Field usage depends on `service` value:
+ - Service `pardot`: API Version
+- `app_id` (String, Sensitive) Field usage depends on `service` value:
+ - Service `churnkey`: Your Churnkey App ID.
+ - Service `open_exchange_rates`: Your Open Exchange Rates App ID.
+- `app_ids` (Set of String) Field usage depends on `service` value:
+ - Service `pendo`: Specific App IDs to sync. Must be populated if `sync_mode` is set to `SpecificAppIds`.
+- `app_ids_appsflyer` (Block Set) (see [below for nested schema](#nestedblock--config--app_ids_appsflyer))
+- `app_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `loopio`: Your Loopio App Key.
+ - Service `servicetitan`: Your ServiceTitan app key.
+ - Service `yotpo`: Your Yotpo App Key
+- `app_reference` (String) Field usage depends on `service` value:
+ - Service `brightpearl`: Your Brightpearl app reference.
+- `app_secret_token` (String, Sensitive)
+- `app_specific_password` (String, Sensitive) Field usage depends on `service` value:
+ - Service `itunes_connect`: Your app-specific password
+- `app_sync_mode` (String) Field usage depends on `service` value:
+ - Service `itunes_connect`: Whether to sync all apps or specific apps.
+- `append_file_option` (String) Field usage depends on `service` value:
+ - Service `aws_cost_report`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number. 
+ - Service `azure_blob_storage`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
+ - Service `box`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
+ - Service `dropbox`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
+ - Service `ftp`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
+ - Service `gcs`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
+ - Service `google_drive`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
+ - Service `kinesis`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
+ - Service `s3`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
+ - Service `sftp`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
+ - Service `share_point`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
+ - Service `wasabi_cloud_storage`: If you know that the source completely overwrites the same file with new data, you can append the changes instead of upserting based on filename and line number.
+- `application_id` (String) Field usage depends on `service` value:
+ - Service `algolia`: Your Algolia application ID.
+- `application_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `cin7core`: Your Cin7 Core application key.
+ - Service `datadog`: Your Datadog application key.
+ - Service `dear`: Your Dear Application key.
+ - Service `partnerize`: Your Partnerize user application key.
+- `apps` (Set of String) Field usage depends on `service` value:
+ - Service `itunes_connect`: Specific apps to sync. Must be populated if `app_sync_mode` is set to `SpecificApps`.
+- `archive_log_format` (String) Field usage depends on `service` value:
+ - Service `sql_server_hva`: Format for archive log file names
+ - Service `sql_server_sap_ecc_hva`: Format for archive log file names
+- `archive_log_path` (String) Field usage depends on `service` value:
+ - Service `sql_server_hva`: Directory where archive logs are located
+ - Service `sql_server_sap_ecc_hva`: Directory where archive logs are located
+- `archive_pattern` (String) Field usage depends on `service` value:
+ - Service `aws_cost_report`: Optional. Files inside of compressed archives with filenames matching this regular expression will be synced.
+ - Service `azure_blob_storage`: Files inside of compressed archives with filenames matching this regular expression will be synced. 
+ - Service `box`: Files inside of compressed archives with filenames matching this regular expression will be synced.
+ - Service `dropbox`: Files inside of compressed archives with filenames matching this regular expression will be synced.
+ - Service `email`: Files inside of compressed archives with filenames matching this regular expression will be synced.
+ - Service `ftp`: Files inside of compressed archives with filenames matching this regular expression will be synced.
+ - Service `gcs`: Files inside of compressed archives with filenames matching this regular expression will be synced.
+ - Service `google_drive`: Files inside of compressed archives with filenames matching this regular expression will be synced.
+ - Service `kinesis`: Optional. Files inside of compressed archives with filenames matching this regular expression will be synced.
+ - Service `s3`: Files inside compressed archives with filenames matching this regular expression will be synced.
+ - Service `sftp`: Files inside of compressed archives with filenames matching this regular expression will be synced.
+ - Service `share_point`: Files inside of compressed archives with filenames matching this regular expression will be synced.
+ - Service `wasabi_cloud_storage`: Files inside compressed archives with filenames matching this regular expression will be synced.
+- `are_soap_credentials_provided` (Boolean) Field usage depends on `service` value:
+ - Service `marketo`: Marketo SOAP credentials provided.
+- `asb_ip` (String) Field usage depends on `service` value:
+ - Service `azure_service_bus`: The IP address or the URL of the ASB namespace
+- `asm_option` (Boolean) Field usage depends on `service` value:
+ - Service `oracle_hva`: Default value: `false`. Set to `true` if you're using ASM on a non-RAC instance.
+ - Service `oracle_sap_hva`: Default value: `false`. Set to `true` if you are using ASM on a non-RAC instance.
+- `asm_oracle_home` (String) Field usage depends on `service` value:
+ - Service `oracle_hva`: ASM Oracle Home path.
+ - Service `oracle_sap_hva`: The Oracle ASM home directory.
+- `asm_password` (String, Sensitive) Field usage depends on `service` value:
+ - Service `oracle_hva`: ASM password. Mandatory if `use_oracle_rac` or `asm_option` is set to `true`.
+ - Service `oracle_sap_hva`: The ASM user's password. Mandatory if `use_oracle_rac` or `asm_option` is set to `true`.
+- `asm_tns` (String) Field usage depends on `service` value:
+ - Service `oracle_hva`: ASM TNS.
+ - Service `oracle_sap_hva`: ASM TNS.
+- `asm_user` (String) Field usage depends on `service` value:
+ - Service `oracle_hva`: ASM user. Mandatory if `use_oracle_rac` or `asm_option` is set to `true`.
+ - Service `oracle_sap_hva`: The ASM user. Mandatory if `use_oracle_rac` or `asm_option` is set to `true`.
+- `attribution_window` (String) Field usage depends on `service` value:
+ - Service `amazon_ads`: Time period used to attribute conversions based on clicks.
+- `attribution_window_size` (String) Field usage depends on `service` value:
+ - Service `tiktok_ads`: Rollback sync duration to capture conversions. Set this to your configured attribution window in TikTok Ads. The default value is 7 days.
+- `audience` (String, Sensitive) Field usage depends on `service` value:
+ - Service `auth0`: Your Auth0 API audience.
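+
+The `asm_option`, `asm_user`, `asm_password`, `asm_tns`, and `asm_oracle_home` fields above work together: enabling `asm_option` is what makes the ASM user, password, and TNS mandatory. A minimal, non-authoritative sketch for an `oracle_hva` connector; the group reference and all values are placeholders:
+
+```terraform
+resource "fivetran_connector" "oracle_hva" {
+  group_id = fivetran_group.group.id # placeholder group reference
+  service  = "oracle_hva"
+
+  destination_schema {
+    prefix = "oracle_hva_src"
+  }
+
+  config {
+    asm_option      = true                   # non-RAC instance using ASM
+    asm_user        = "asm_reader"           # mandatory once asm_option is true
+    asm_password    = var.asm_password       # mandatory once asm_option is true
+    asm_tns         = "ASM_TNS_ALIAS"        # mandatory once asm_option is true
+    asm_oracle_home = "/u01/app/grid/19.0.0" # ASM Oracle Home path (placeholder)
+  }
+}
+```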
+- `auth` (String) Field usage depends on `service` value: + - Service `redshift_db`: Password-based authentication type + - Service `snowflake_db`: Password-based or key-based authentication type +- `auth_code` (String, Sensitive) Field usage depends on `service` value: + - Service `happyfox`: Your HappyFox auth code. +- `auth_environment` (String) Field usage depends on `service` value: + - Service `younium`: Your Younium auth environment. +- `auth_method` (String) Field usage depends on `service` value: + - Service `azure_sql_db`: Authentication Method. + - Service `azure_sql_managed_db`: Authentication Method. + - Service `webhooks`: The authentication mechanism you want to use +- `auth_mode` (String) Field usage depends on `service` value: + - Service `anaplan`: The Anaplan authentication method. + - Service `concur`: The Authentication Mode used by SAP Concur. It can be PasswordGrant or CompanyLevel auth mode + - Service `github`: Authorization type. +- `auth_token` (String, Sensitive) Field usage depends on `service` value: + - Service `zonka_feedback`: Your Zonka Feedback auth token. +- `auth_type` (String) Field usage depends on `service` value: + - Service `airtable`: Type of authentication being used by connector + - Service `aws_cost_report`: Access approach + - Service `azure_service_bus`: The authentication mode to access the topic + - Service `dynamics_365`: Authentication mechanism. Either one of `OAUTH2`, or `SERVICE_PRINCIPAL`. Default value `OAUTH2` + - Service `gcs`: Authorization type. Required for storage bucket authentication. + - Service `google_sheets`: The `OAuth` value must be specified for this type of authorization. + - Service `jira`: Authorization type. + - Service `mixpanel`: Authentication Method + - Service `pardot`: Authenticate using OAuth or HTTP Basic + - Service `qualtrics`: Type of authentication being used by connector + - Service `s3`: Access approach + - Service `wasabi_cloud_storage`: The Wasabi Cloud Storage Access approach. Required for connector creation. Default value: `ACCESS_KEY`. +- `authentication_method` (String) Field usage depends on `service` value: + - Service `adobe_analytics`: Authentication Method + - Service `elastic_cloud`: The authentication method used to connect to your cluster. + - Service `es_self_hosted`: The authentication method used to connect to your cluster. + - Service `opendistro`: The authentication method used to connect to your cluster. + - Service `opensearch`: The authentication method used to connect to your cluster. +- `aws_region_code` (String) Field usage depends on `service` value: + - Service `dynamodb`: The AWS region code for the DynamoDB instance, e.g. `us-east-1`. +- `backint_configuration_path` (String) +- `backint_executable_path` (String) +- `base_currency` (String) Field usage depends on `service` value: + - Service `open_exchange_rates`: Your Open Exchange Rates Base Currency. +- `base_domain` (String) Field usage depends on `service` value: + - Service `freshteam`: Your company's Freshteam base domain name (usually **company**.freshteam.com). +- `base_id` (String) Field usage depends on `service` value: + - Service `airtable`: ID of base in Airtable +- `base_url` (String) Field usage depends on `service` value: + - Service `aha`: Your Aha! subdomain. + - Service `billing_platform`: Your BillingPlatform subdomain. + - Service `boostr`: Your Boostr base URL. + - Service `brex`: Your Brex Base URL + - Service `centra`: Your Centra Base URL. + - Service `culture_amp`: Your Culture Amp base URL. 
+ - Service `financial_force`: (Optional) The custom Salesforce domain. Make sure that the `base_url` starts with `https://`. + - Service `freshsales`: Your Freshsales product. + - Service `gongio`: Your Gong API Base URL. + - Service `ironclad`: Your Ironclad base url. + - Service `jotform`: Your Jotform base URL. + - Service `mailgun`: Your Mailgun base URL. + - Service `ortto`: Your Ortto base URL. Possible values: `api`, `api.au`, `api.eu`. + - Service `prisma_cloud`: Your Prisma Cloud admin console URL. + - Service `salesforce`: (Optional) The custom Salesforce domain. Make sure that the `base_url` starts with `https://`. + - Service `salesforce_sandbox`: (Optional) The custom Salesforce domain. Make sure that the `base_url` starts with `https://`. + - Service `veevavault`: Your Veeva Vault base URL. + - Service `vitally`: Your Vitally base URL. +- `bearer_token` (String, Sensitive) Field usage depends on `service` value: + - Service `ada`: Your Ada API Access Token. + - Service `crowddev`: Your crowd.dev Auth Token. + - Service `customerio`: Your Customer.io App API Key. + - Service `freshchat`: Your Freshchat API Token. + - Service `hopin`: Your Hopin API key. + - Service `orbit`: Your Orbit API Token. + - Service `productboard`: Your Productboard API key. + - Service `smarthr`: Your SmartHR access token. + - Service `sprout`: Your Sprout Social API Access Token. + - Service `zenefits`: Your Zenefits bearer token. +- `blob_sas_url` (String, Sensitive) Field usage depends on `service` value: + - Service `webhooks`: The blob SAS URL of your Azure container. Required if `bucket_service` is set to `AZURE`. +- `blockchain` (String) Field usage depends on `service` value: + - Service `rarible`: Your Rarible Blockchain. +- `brand_id` (String) Field usage depends on `service` value: + - Service `oracle_moat_analytics`: Your Oracle Moat Analytics Brand ID. +- `breakdowns` (Set of String) Field usage depends on `service` value: + - Service `facebook`: List of breakdowns which connector will sync. [Possible breakdowns values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#breakdowns). +- `bucket` (String) Field usage depends on `service` value: + - Service `appsflyer`: Customer S3 Bucket + - Service `aws_cloudtrail`: The AWS bucket name which is configured for AWS CloudTrail. + - Service `aws_cost_report`: The S3 bucket name with the AWS Cost and Usage Report + - Service `aws_inventory`: The AWS bucket name that is configured for AWS Config. + - Service `aws_lambda`: S3 Bucket + - Service `cloudfront`: The bucket name for CloudFront. + - Service `gcs`: The name of the GCS bucket. + - Service `google_play`: The Google Cloud Storage source bucket. + - Service `heap`: The S3 bucket name. + - Service `kinesis`: The name of the Kinesis bucket. + - Service `s3`: The S3 bucket name. Required for connector creation. + - Service `segment`: The name of the Segment bucket. Must be populated if `sync_type` is set to `S3`. + - Service `wasabi_cloud_storage`: The Wasabi Cloud Storage bucket name. Required for connector creation. +- `bucket_name` (String) Field usage depends on `service` value: + - Service `adjust`: Your AWS S3 or GCS bucket. + - Service `google_analytics_360`: The name of the bucket. + - Service `google_analytics_4_export`: The name of the bucket. +- `bucket_service` (String) Field usage depends on `service` value: + - Service `webhooks`: Whether to store the events in Fivetran's container service or your S3 bucket. Default value: `Fivetran`. 
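+
+As a usage sketch for the `bucket_service`/`blob_sas_url` pair above: for the `webhooks` service, `blob_sas_url` is only required when `bucket_service` is set to `AZURE`. All identifiers and values below are placeholders:
+
+```terraform
+resource "fivetran_connector" "webhooks" {
+  group_id = fivetran_group.group.id # placeholder group reference
+  service  = "webhooks"
+
+  destination_schema {
+    name  = "webhooks"
+    table = "events"
+  }
+
+  config {
+    bucket_service = "AZURE"                # store events in your own Azure container
+    blob_sas_url   = var.azure_blob_sas_url # required because bucket_service = "AZURE"
+  }
+}
+```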
+- `business_accounts` (Set of String) Field usage depends on `service` value: + - Service `reddit_ads`: Specific Accounts to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`. +- `business_id` (String, Sensitive) Field usage depends on `service` value: + - Service `birdeye`: Your Birdeye Business ID. +- `business_unit_id` (String) Field usage depends on `service` value: + - Service `pardot`: Business Unit Id +- `catalog` (String) Field usage depends on `service` value: + - Service `databricks_db`: catalog to sync +- `certificate` (String, Sensitive) Field usage depends on `service` value: + - Service `anaplan`: The contents of your PEM certificate file. Must be populated if `auth_mode` is set to `Certificate`. + - Service `qualtrics`: Your Client Certificate +- `click_attribution_window` (String) Field usage depends on `service` value: + - Service `facebook`: Time period to attribute conversions based on clicks. [Possible click_attribution_window values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#clickattributionwindow). + - Service `pinterest_ads`: The number of days to use as the conversion attribution window for a 'click' action. +- `client` (String) +- `client_cert` (String, Sensitive) Field usage depends on `service` value: + - Service `apache_kafka`: Kafka client certificate. + - Service `heroku_kafka`: Heroku Kafka client certificate. Required for `TLS` security protocol. +- `client_cert_key` (String, Sensitive) Field usage depends on `service` value: + - Service `apache_kafka`: Kafka client certificate key. + - Service `heroku_kafka`: Heroku Kafka client certificate key. Required for `TLS` security protocol. +- `client_host` (String) Field usage depends on `service` value: + - Service `ceridian_dayforce`: Your Ceridian Dayforce Client Host. +- `client_id` (String, Sensitive) Field usage depends on `service` value: + - Service `acumatica`: Your Acumatica client ID. + - Service `adobe_analytics`: Client ID from the OAuth Server-to-Server or Service Account (JWT) credentials of your Adobe Project. + - Service `adobe_workfront`: Your Adobe Workfront client ID. + - Service `adp_workforce_now`: Your ADP Client ID. + - Service `akamai`: Your Akamai client ID. + - Service `auth0`: Your Auth0 client ID. + - Service `billing_platform`: Your BillingPlatform client ID. + - Service `brightcove`: Your Brightcove client ID. + - Service `brightpearl`: Your Brightpearl client id. + - Service `buildium`: Your Buildium API client ID. + - Service `canvas_by_instructure`: Your Canvas by Instructure client ID. + - Service `castor_edc`: Your Castor EDC client Id. + - Service `commercetools`: Your commercetools client ID. + - Service `concur`: The SAP Concur Client ID. + - Service `coupa`: Your Coupa client_id + - Service `criteo`: Your Criteo Client ID. + - Service `criteo_retail_media`: Your Criteo Retail Media client ID. + - Service `culture_amp`: Your Culture Amp client ID. + - Service `cvent`: Your Cvent client ID. + - Service `d2l_brightspace`: Your D2L Brightspace client ID. + - Service `ebay`: Your eBay app ID. + - Service `exact_online`: Your Exact Online client ID. + - Service `flexport`: The Flexport API Key. + - Service `genesys`: Your Genesys client ID. + - Service `hana_sap_hva_ecc_netweaver`: Three-digit (000-999) identifier of the SAP client, which is sent to an AS ABAP upon logon. + - Service `hana_sap_hva_s4_netweaver`: Three-digit (000-999) identifier of the SAP client, which is sent to an AS ABAP upon logon. 
+ - Service `ilevel`: Your iLevel Client ID. + - Service `instructure`: Your Instructure client ID. + - Service `integral_ad_science`: Your integral_ad_science client id. + - Service `jama_software`: Your Jama Software client ID. + - Service `looker_source`: Your Looker Client ID. + - Service `marketo`: Marketo REST API Client Id. + - Service `medallia`: Medallia Client ID + - Service `microsoft_entra_id`: Your Microsoft Entra ID Client ID. + - Service `microsoft_teams`: Your Microsoft Teams Client ID. + - Service `navan`: Your Navan client ID. + - Service `on24`: Your ON24 client ID. + - Service `oracle_sap_hva_netweaver`: Three-digit (000-999) identifier of the SAP client, which is sent to an AS ABAP upon logon. + - Service `paychex`: Your Paychex client ID. + - Service `paypal`: `Client ID` of your PayPal client application. + - Service `paypal_sandbox`: `Client ID` of your PayPal client application. + - Service `personio`: Your Personio Client ID. + - Service `piwik_pro`: Your Piwik PRO client ID. + - Service `podio`: Your Podio client ID. + - Service `power_reviews_enterprise`: Your PowerReviews Enterprise Client ID. + - Service `prisma_cloud`: Your Prisma Cloud access key ID. + - Service `procore`: Your Procore client ID. + - Service `quora_ads`: Your Quora Ads client ID. + - Service `reltio`: Your Reltio client ID. + - Service `salesforce_commerce_cloud`: The Salesforce Commerce Cloud Client ID. + - Service `salesforce_marketing_cloud`: The Salesforce Marketing Cloud client ID. + - Service `salesloft`: `Client ID` of your Salesloft client application. + - Service `sap_success_factors`: Your SAP SuccessFactors Client ID. + - Service `servicenow`: ServiceNow Client ID. + - Service `servicetitan`: Your ServiceTitan client ID. + - Service `sharetribe`: Your Sharetribe client ID. + - Service `shipnetwork`: Your ShipNetwork client ID. + - Service `sigma_computing_source`: Your Sigma Computing client ID. + - Service `skillstx`: Your SkillsTX client ID. + - Service `smartrecruiters`: Your SmartRecruiters client ID. + - Service `splash`: Your Splash client ID. + - Service `square`: The Application ID of your organization. + - Service `standard_metrics`: Your Standard Metrics Client ID. + - Service `swoogo`: Your Swoogo client Id. + - Service `taboola`: The Taboola client ID. + - Service `talkdesk`: The Client ID of your OAuth Client + - Service `toast`: Your Toast client ID. + - Service `trelica`: Your Trelica client ID. + - Service `tymeshift`: Your Tymeshift email. + - Service `udemy_business`: Your Udemy Business client ID. + - Service `visma`: Your Visma client ID. + - Service `vonage_contact_center`: Your Vonage Contact Center client ID. + - Service `walmart_marketplace`: Your Walmart Marketplace client ID. + - Service `xero`: your clientId + - Service `xray`: Your Xray Client ID. + - Service `yougov_sport`: Your Yougov Sport client ID. + - Service `zendesk_chat`: Your Zendesk client ID. + - Service `zoho_books`: Your Zoho Books Client ID. + - Service `zoho_campaigns`: Your Zoho Campaigns Client ID. + - Service `zoho_desk`: Your Zoho Desk Client Id. + - Service `zoho_inventory`: Your Zoho Inventory client ID. + - Service `zuora`: Zuora Client ID. + - Service `zuora_sandbox`: Zuora Client ID. +- `client_key` (String, Sensitive) Field usage depends on `service` value: + - Service `appfigures`: Your Appfigures Client Key. + - Service `thinkific`: Your Thinkific client key. + - Service `yougov_sport`: Your Yougov Sport client key. 
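+
+A hedged sketch of how the `client_cert` and `client_cert_key` fields above pair up for an `apache_kafka` connector secured with TLS. The `security_protocol` field is documented further down this list; setting it to `TLS` here is an assumption, and all values are placeholders:
+
+```terraform
+resource "fivetran_connector" "kafka" {
+  group_id = fivetran_group.group.id # placeholder group reference
+  service  = "apache_kafka"
+
+  destination_schema {
+    name = "kafka_topics"
+  }
+
+  config {
+    security_protocol = "TLS"
+    client_cert       = file("certs/client.pem")     # Kafka client certificate
+    client_cert_key   = file("certs/client-key.pem") # matching client certificate key
+  }
+}
+```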
+- `client_name` (String, Sensitive) Field usage depends on `service` value:
+ - Service `destini`: Your Destini Client Name.
+ - Service `medallia`: Medallia company name
+- `client_namespace` (String) Field usage depends on `service` value:
+ - Service `ceridian_dayforce`: Your Ceridian Dayforce Client Namespace.
+- `client_private_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `aurora_postgres`: Client Private Key in .pem format.
+ - Service `azure_postgres`: Client Private Key in .pem format.
+ - Service `google_cloud_postgresql`: Client Private Key in .pem format.
+ - Service `heroku_postgres`: Client Private Key in .pem format.
+ - Service `postgres`: Client Private Key in .pem format.
+ - Service `postgres_rds`: Client Private Key in .pem format.
+- `client_public_certificate` (String) Field usage depends on `service` value:
+ - Service `aurora_postgres`: Client Certificate in .pem format.
+ - Service `azure_postgres`: Client Certificate in .pem format.
+ - Service `google_cloud_postgresql`: Client Certificate in .pem format.
+ - Service `heroku_postgres`: Client Certificate in .pem format.
+ - Service `postgres`: Client Certificate in .pem format.
+ - Service `postgres_rds`: Client Certificate in .pem format.
+- `client_secret` (String, Sensitive) Field usage depends on `service` value:
+ - Service `acumatica`: Your Acumatica client secret.
+ - Service `adobe_analytics`: Client Secret from the OAuth Server-to-Server or Service Account (JWT) credentials of your Adobe Project.
+ - Service `adobe_workfront`: Your Adobe Workfront client secret.
+ - Service `adp_workforce_now`: Your ADP Client Secret.
+ - Service `akamai`: Your Akamai client secret.
+ - Service `auth0`: Your Auth0 client secret.
+ - Service `billing_platform`: Your BillingPlatform client secret.
+ - Service `brightcove`: Your Brightcove client secret.
+ - Service `brightpearl`: Your Brightpearl client secret.
+ - Service `canvas_by_instructure`: Your Canvas by Instructure client secret.
+ - Service `castor_edc`: Your Castor EDC Client Secret.
+ - Service `commercetools`: Your commercetools client secret.
+ - Service `concur`: The SAP Concur Client secret.
+ - Service `coupa`: Your Coupa client_secret
+ - Service `criteo`: Your Criteo client secret key.
+ - Service `criteo_retail_media`: Your Criteo Retail Media client secret.
+ - Service `culture_amp`: Your Culture Amp client secret.
+ - Service `cvent`: Your Cvent client secret.
+ - Service `d2l_brightspace`: Your D2L Brightspace client secret.
+ - Service `ebay`: Your eBay cert ID.
+ - Service `exact_online`: Your Exact Online client secret.
+ - Service `flexport`: The Flexport API Secret.
+ - Service `genesys`: Your Genesys client secret.
+ - Service `ilevel`: Your iLevel Client Secret.
+ - Service `instructure`: Your Instructure client secret.
+ - Service `integral_ad_science`: Your integral_ad_science client secret.
+ - Service `jama_software`: Your Jama Software client secret.
+ - Service `looker_source`: Your Looker Client Secret.
+ - Service `marketo`: Marketo REST API Client Secret.
+ - Service `medallia`: Medallia Client Secret key
+ - Service `microsoft_entra_id`: Your Microsoft Entra ID Client Secret.
+ - Service `microsoft_teams`: Your Microsoft Teams Client Secret.
+ - Service `navan`: Your Navan client secret.
+ - Service `paychex`: Your Paychex client secret.
+ - Service `personio`: Your Personio secret.
+ - Service `piwik_pro`: Your Piwik PRO client secret.
+ - Service `podio`: Your Podio client secret. 
+ - Service `power_reviews_enterprise`: Your PowerReviews Enterprise Client Secret. + - Service `prisma_cloud`: Your Prisma Cloud secret access Key. + - Service `procore`: Your Procore client secret. + - Service `quora_ads`: Your Quora Ads client secret. + - Service `reltio`: Your Reltio client secret. + - Service `salesforce_commerce_cloud`: The Salesforce Commerce Cloud Client secret. + - Service `salesforce_marketing_cloud`: The Salesforce Marketing Cloud client secret. + - Service `salesloft`: `Client Secret` of your Salesloft client application. + - Service `sap_success_factors`: Your SAP SuccessFactors Client Secret that you generated through SAML Assertion. + - Service `servicenow`: ServiceNow Client Secret. + - Service `servicetitan`: Your ServiceTitan secret key. + - Service `sharetribe`: Your Sharetribe client secret. + - Service `sigma_computing_source`: Your Sigma Computing client secret. + - Service `skillstx`: Your SkillsTX client secret. + - Service `smartrecruiters`: Your SmartRecruiters client secret. + - Service `splash`: Your Splash client secret. + - Service `square`: The Application Secret of your organization. + - Service `standard_metrics`: Your Standard Metrics Client secret. + - Service `swoogo`: Your Swoogo Client Secret. + - Service `taboola`: The Taboola client secret. + - Service `talkdesk`: The Client Secret of your OAuth Client + - Service `thinkific`: Your Thinkific client secret. + - Service `toast`: Your Toast client secret. + - Service `trelica`: Your Trelica client secret. + - Service `tymeshift`: Your Tymeshift password. + - Service `udemy_business`: Your Udemy Business client secret. + - Service `visma`: Your Visma client secret. + - Service `vonage_contact_center`: Your Vonage Contact Center client secret. + - Service `walmart_marketplace`: Your Walmart Marketplace client secret. + - Service `xero`: your clientSecret + - Service `xray`: Your Xray Client Secret. + - Service `zendesk_chat`: Your Zendesk client secret. + - Service `zoho_books`: Your Zoho Books Client Secret. + - Service `zoho_campaigns`: Your Zoho Campaigns Client Secret. + - Service `zoho_desk`: Your Zoho Desk Client secret. + - Service `zoho_inventory`: Your Zoho Inventory client secret. + - Service `zuora`: Zuora Client Secret. + - Service `zuora_sandbox`: Zuora Client Secret. +- `cloud_storage_type` (String) Field usage depends on `service` value: + - Service `braze`: Cloud storage type Braze Current is connected to. +- `collection_address` (String) Field usage depends on `service` value: + - Service `rarible`: Your Rarible Collection Address. +- `columns` (Set of String) Field usage depends on `service` value: + - Service `double_click_publishers`: Columns provide all trafficking statistics and revenue information available for the chosen Dimensions. +- `companies` (Set of String) Field usage depends on `service` value: + - Service `business_central`: List of companies to sync +- `company` (String) Field usage depends on `service` value: + - Service `ordway`: Your Ordway company name. +- `company_id` (String) Field usage depends on `service` value: + - Service `360learning`: Your 360Learning Company ID. + - Service `sage_intacct`: Company ID + - Service `sap_success_factors`: Your SAP SuccessFactors Company ID. +- `company_ids` (String) Field usage depends on `service` value: + - Service `cj_commission_detail`: Your CJ Commission Detail company IDs. +- `company_key` (String, Sensitive) Field usage depends on `service` value: + - Service `khoros_care`: Your Khoros Care companyKey. 
+ - Service `upland`: Your Upland Software Company Key.
+- `company_request_token` (String, Sensitive) Field usage depends on `service` value:
+ - Service `concur`: The SAP Concur Company Request Token
+- `company_uuid` (String) Field usage depends on `service` value:
+ - Service `concur`: The SAP Concur Company UUID
+- `compression` (String) Field usage depends on `service` value:
+ - Service `aws_cost_report`: If your files are compressed, but do not have extensions indicating the compression method, you can force them to be uncompressed according to the selected compression algorithm. Leave the value as infer if your files are saved with the correct compression extensions.
+ - Service `azure_blob_storage`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format.
+ - Service `box`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format.
+ - Service `dropbox`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format.
+ - Service `email`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format.
+ - Service `ftp`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format.
+ - Service `gcs`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format.
+ - Service `google_drive`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format.
+ - Service `kinesis`: If your files are compressed, but do not have extensions indicating the compression method, you can force them to be uncompressed according to the selected compression algorithm. Leave the value as infer if your files are saved with the correct compression extensions.
+ - Service `s3`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format.
+ - Service `sftp`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format.
+ - Service `share_point`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format.
+ - Service `wasabi_cloud_storage`: The compression format is used to let Fivetran know that even files without a compression extension should be decompressed using the selected compression format.
+- `config_method` (String) Field usage depends on `service` value:
+ - Service `google_display_and_video_360`: The report configuration method. Specifies whether a new configuration is defined manually or an existing configuration is reused. The default value is `CREATE_NEW`.
+- `config_repository_url` (String) Field usage depends on `service` value:
+ - Service `snowplow`: Public repository URL containing JSON configuration files.
+- `config_type` (String) Field usage depends on `service` value:
+ - Service `facebook`: Option to select Prebuilt Reports or Custom Reports. [Possible config_type values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#configtype).
+ - Service `google_analytics`: Whether to use the [Prebuilt Reports or Custom Reports](https://fivetran.com/docs/connectors/applications/google-analytics#schemainformation). 
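+
+For file-based connectors, the `compression` field above combines with `append_file_option` (earlier in this list) to control how incoming files are interpreted. A minimal sketch for `s3` with placeholder values; the exact enum spellings used here (`gzip`, `append_file`) are assumptions to verify against the Fivetran API reference:
+
+```terraform
+resource "fivetran_connector" "s3" {
+  group_id = fivetran_group.group.id # placeholder group reference
+  service  = "s3"
+
+  destination_schema {
+    name  = "s3_source"
+    table = "events"
+  }
+
+  config {
+    bucket             = "my-company-exports" # S3 bucket name (placeholder)
+    compression        = "gzip"               # force decompression of files lacking a .gz extension (assumed value)
+    append_file_option = "append_file"        # source overwrites whole files, so append instead of upserting (assumed value)
+  }
+}
+```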
+- `connecting_user` (String)
+- `connecting_user_email` (String)
+- `connection_method` (String) Field usage depends on `service` value:
+ - Service `aws_msk`: How Fivetran connects to your message brokers in the cluster.
+ - Service `azure_blob_storage`: Connection Method. Possible values: `DIRECT`: Fivetran will connect directly to your storage container, `SSH_TUNNEL`: Fivetran will connect to your storage container using a host machine (commonly used for VPN connections), `PRIVATE_LINK`: Fivetran will connect to your storage container using PrivateLink.
+ - Service `azure_function`: Connection Method. Possible values: `DIRECT`: Fivetran will connect directly to your function, `PRIVATE_LINK`: Fivetran will connect to your storage container using PrivateLink.
+ - Service `azure_service_bus`: The connection method.
+ - Service `sftp`: The connection method used to connect to the SFTP server.
+- `connection_name` (String) Field usage depends on `service` value:
+ - Service `appsflyer`: Your Data Locker Connection Name. Default value: `data-locker-hourly/`.
+- `connection_string` (String, Sensitive) Field usage depends on `service` value:
+ - Service `azure_blob_storage`: The blob storage container connection string.
+ - Service `azure_event_hub`: Connection string of the Event Hub Namespace you want to sync.
+ - Service `azure_service_bus`: The connection string used for authentication. Required if the authentication type is `ConnectionString`.
+ - Service `microsoft_dynamics_365_fno`: The blob storage container's connection string.
+ - Service `microsoft_dynamics_365_fo`: The blob storage container connection string.
+- `connection_type` (String) Field usage depends on `service` value:
+ - Service `aurora`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted.
+ - Service `aurora_postgres`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted.
+ - Service `aws_cost_report`: Connection method. Default value: `Directly`.
+ - Service `aws_lambda`: Connection method. Default value: `Directly`.
+ - Service `azure_blob_storage`: Connection method. Default value: `Directly`.
+ - Service `azure_cosmos_for_mongo`: Possible values:`Directly`, `PrivateLink`. `Directly` is the default value.
+ - Service `azure_postgres`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted.
+ - Service `azure_sql_db`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted.
+ - Service `azure_sql_managed_db`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`.
`SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `bigquery_db`: Direct or PrivateLink connection + - Service `clarity`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `cockroachdb`: Possible values:`Directly`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `db2i_hva`: Possible values:`SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and the following parameter's values are specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. + - Service `db2i_sap_hva`: Possible values:`SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. + - Service `documentdb`: Possible values:`SshTunnel`, `PrivateLink` . `SshTunnel` is used as a value if this parameter is omitted in the request and the following parameter's values are specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. + - Service `dynamics_365_fo`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `ehr`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `elastic_cloud`: Possible values:`Directly`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `email`: Connection method. Default value: `Directly`. + - Service `es_self_hosted`: Possible values:`Directly`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `google_cloud_mysql`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `google_cloud_postgresql`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. 
`SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `google_cloud_sqlserver`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `hana_sap_hva_b1`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `hana_sap_hva_ecc`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `hana_sap_hva_ecc_netweaver`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `hana_sap_hva_s4`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `hana_sap_hva_s4_netweaver`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `heroku_postgres`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `kinesis`: Connection method. Default value: `Directly`. + - Service `magento_mysql`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `magento_mysql_rds`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `maria`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. 
`SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `maria_azure`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `maria_rds`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `mongo`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `mongo_sharded`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `mysql`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `mysql_azure`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `mysql_rds`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `opendistro`: Possible values:`Directly`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `opensearch`: Possible values:`Directly`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `oracle`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. 
+ - Service `oracle_ebs`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `oracle_hva`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `oracle_rac`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `oracle_rds`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `oracle_sap_hva`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `oracle_sap_hva_netweaver`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `postgres`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `postgres_rds`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `redshift_db`: Directly or Private Link + - Service `s3`: Connection method. Default value: `Directly`. + - Service `sap_hana`: Possible values:`Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `sap_s4hana`: Connection Method + - Service `snowflake_db`: Directly or Private Link + - Service `sql_server`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. 
+ - Service `sql_server_hva`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `sql_server_rds`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. + - Service `sql_server_sap_ecc_hva`: Possible values: `Directly`, `PrivateLink`, `SshTunnel`. `SshTunnel` is used as a value if this parameter is omitted in the request and any of the following parameter's values is specified: `tunnel_host`, `tunnel_port`, `tunnel_user`. Otherwise, `Directly` is used as a value if the parameter is omitted. +- `console_url` (String) Field usage depends on `service` value: + - Service `prisma_cloud`: Your Prisma Cloud console URL. +- `consumer_group` (String) Field usage depends on `service` value: + - Service `apache_kafka`: Kafka consumer group name. + - Service `aws_msk`: The name of consumer group created for Fivetran. + - Service `azure_event_hub`: Name of consumer group created for Fivetran. + - Service `confluent_cloud`: Confluent Cloud consumer group name. + - Service `heroku_kafka`: Heroku Kafka consumer group name. +- `consumer_key` (String, Sensitive) Field usage depends on `service` value: + - Service `netsuite_suiteanalytics`: Consumer Key + - Service `twitter`: API Key of your app + - Service `twitter_ads`: The Twitter App consumer key. + - Service `woocommerce`: Your WooCommerce Consumer key. +- `consumer_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `netsuite_suiteanalytics`: Consumer Secret + - Service `twitter`: API Secret of your app + - Service `twitter_ads`: The Twitter App consumer secret. + - Service `woocommerce`: Your WooCommerce Consumer secret. +- `container_address` (String) Field usage depends on `service` value: + - Service `azure_blob_storage`: IP address of the Azure Storage Container which is accessible from host machine. +- `container_name` (String) Field usage depends on `service` value: + - Service `azure_blob_storage`: The name of the blob container. + - Service `microsoft_dynamics_365_fno`: The name of the blob container. + - Service `microsoft_dynamics_365_fo`: The name of the blob container. +- `content_owner_id` (String) Field usage depends on `service` value: + - Service `youtube_analytics`: Used only for Content Owner reports. The ID of the content owner for whom the API request is being made. +- `conversation_webhook_url` (String) Field usage depends on `service` value: + - Service `helpscout`: Your conversation webhook URL +- `conversion_dimensions` (Set of String) Field usage depends on `service` value: + - Service `double_click_campaign_manager`: Conversion Dimensions. +- `conversion_report_time` (String) Field usage depends on `service` value: + - Service `pinterest_ads`: The date that the user interacted with the ad OR completed a conversion event. +- `conversion_window_size` (Number) Field usage depends on `service` value: + - Service `google_ads`: A period of time in days during which a conversion is recorded. 
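+
+The `connection_type` defaulting described above can be made explicit in configuration. A minimal, hypothetical sketch: `connection_type`, `host`, `database`, and the `tunnel_*` parameters appear in this reference, while the resource layout and the `port` and `user` fields are assumptions.
+
+```hcl
+# Hypothetical sketch: because tunnel_host/tunnel_port/tunnel_user are set,
+# connection_type would default to "SshTunnel"; it is spelled out here anyway.
+resource "fivetran_connector" "postgres_tunneled" {
+  group_id = "my_group_id" # placeholder
+  service  = "postgres"
+
+  destination_schema {
+    name = "pg" # assumed attribute
+  }
+
+  config {
+    host            = "db.internal.example.com"
+    port            = 5432       # assumed field
+    user            = "fivetran" # assumed field
+    database        = "analytics"
+    connection_type = "SshTunnel"
+    tunnel_host     = "bastion.example.com"
+    tunnel_port     = 22
+    tunnel_user     = "fivetran"
+  }
+}
+```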
+- `convert_dats_type_to_date` (Boolean)
+- `csv_definition` (String) Field usage depends on `service` value:
+ - Service `adjust`: CSV definition for the CSV export (https://help.adjust.com/en/article/csv-uploads#how-do-i-format-my-csv-definition).
+- `currency` (String) Field usage depends on `service` value:
+ - Service `criteo`: Currency
+- `custom_event_sync_mode` (String) Field usage depends on `service` value:
+ - Service `iterable`: Custom Events Sync Mode.
+- `custom_events` (Set of String) Field usage depends on `service` value:
+ - Service `iterable`: List of custom events to sync. Should be specified when `custom_event_sync_mode` is `SelectedEvents`.
+- `custom_field_ids` (Set of String) Field usage depends on `service` value:
+ - Service `double_click_publishers`: The list of custom field IDs included in the report. Custom fields can only be selected with their corresponding dimensions.
+- `custom_floodlight_variables` (Set of String) Field usage depends on `service` value:
+ - Service `double_click_campaign_manager`: Custom Floodlight variables enable you to capture information beyond the basics (visits and revenue) that you can collect with standard parameters in your tags.
+- `custom_payloads` (Block Set) (see [below for nested schema](#nestedblock--config--custom_payloads))
+- `custom_reports` (Block Set) (see [below for nested schema](#nestedblock--config--custom_reports))
+- `custom_tables` (Block Set) (see [below for nested schema](#nestedblock--config--custom_tables))
+- `custom_url` (String) Field usage depends on `service` value:
+ - Service `dbt_cloud`: Your dbt Cloud access URL.
+ - Service `jotform`: Your Jotform custom base URL.
+- `customer_api_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `ukg_pro`: Your UKG Pro Customer API key.
+- `customer_id` (String) Field usage depends on `service` value:
+ - Service `google_ads`: ID of the customer, which can be retrieved from your AdWords dashboard.
+- `customer_list_id` (String) Field usage depends on `service` value:
+ - Service `salesforce_commerce_cloud`: The parameter to retrieve customer details.
+- `daily_api_call_limit` (Number)
+- `data_access_method` (String) Field usage depends on `service` value:
+ - Service `cosmos`: The source data access method. Supported values: `ACCOUNT_KEY` - Data access method that uses account keys to authenticate to the source database. It comes in both read-write and read-only variants. `RESOURCE_TOKEN` - Fine-grained permission model based on native Azure Cosmos DB users and permissions. Learn more in our [Azure Cosmos DB Data Access Methods documentation](https://fivetran.com/docs/connectors/databases/cosmos#dataaccessmethods).
+- `data_center` (String) Field usage depends on `service` value:
+ - Service `brightpearl`: Your Brightpearl data center.
+ - Service `qualtrics`: Data center ID of the Qualtrics account. Can be found in the URL before `qualtrics.com`. (For example, if your URL is `youraccount.ca1.qualtrics.com`, then the data center is `ca1`.)
+ - Service `zoho_crm`: Data Center, depending on the Domain name.
+- `data_center_id` (String) Field usage depends on `service` value:
+ - Service `zonka_feedback`: Your Zonka Feedback data center ID.
+- `data_set_name` (String) Field usage depends on `service` value:
+ - Service `bigquery_db`: Data set name
+- `database` (String) Field usage depends on `service` value:
+ - Service `aurora`: The database name.
+ - Service `aurora_postgres`: The database name.
+ - Service `azure_postgres`: The database name.
+ - Service `azure_sql_db`: The database name.
+ - Service `azure_sql_managed_db`: The database name.
+ - Service `clarity`: The database name.
+ - Service `cockroachdb`: The database name.
+ - Service `db2i_hva`: The database name.
+ - Service `db2i_sap_hva`: The database name.
+ - Service `dynamics_365_fo`: The database name.
+ - Service `ehr`: The database name.
+ - Service `google_cloud_mysql`: The database name.
+ - Service `google_cloud_postgresql`: The database name.
+ - Service `google_cloud_sqlserver`: The database name.
+ - Service `hana_sap_hva_b1`: The Hana database name.
+ - Service `hana_sap_hva_ecc`: The Hana database name.
+ - Service `hana_sap_hva_ecc_netweaver`: The Hana database name.
+ - Service `hana_sap_hva_s4`: The Hana database name.
+ - Service `hana_sap_hva_s4_netweaver`: The Hana database name.
+ - Service `heroku_postgres`: The database name.
+ - Service `magento_mysql`: The database name.
+ - Service `magento_mysql_rds`: The database name.
+ - Service `maria`: The database name.
+ - Service `maria_azure`: The database name.
+ - Service `maria_rds`: The database name.
+ - Service `mysql`: The database name.
+ - Service `mysql_azure`: The database name.
+ - Service `mysql_rds`: The database name.
+ - Service `oracle`: The database name.
+ - Service `oracle_ebs`: The database name.
+ - Service `oracle_hva`: The database name.
+ - Service `oracle_rac`: The database name.
+ - Service `oracle_rds`: The database name.
+ - Service `oracle_sap_hva`: The database name.
+ - Service `postgres`: The database name.
+ - Service `postgres_rds`: The database name.
+ - Service `redshift_db`: The database name: Redshift
+ - Service `sap_hana`: The database name.
+ - Service `sap_s4hana`: The database name.
+ - Service `snowflake_db`: The database name: Snowflake
+ - Service `sql_server`: The database name.
+ - Service `sql_server_hva`: The database name.
+ - Service `sql_server_rds`: The database name.
+ - Service `sql_server_sap_ecc_hva`: The database name.
+- `database_name` (String) Field usage depends on `service` value:
+ - Service `firebase`: Database Name
+- `dataset_id` (String) Field usage depends on `service` value:
+ - Service `google_analytics_360`: The dataset ID.
+ - Service `google_analytics_4_export`: The Dataset ID.
+- `datasource` (String) Field usage depends on `service` value:
+ - Service `netsuite_suiteanalytics`: The NetSuite data source value: `NetSuite.com`.
+- `date_granularity` (String) Field usage depends on `service` value:
+ - Service `adobe_analytics`: The aggregation duration you want. Default value: `HOUR`.
+- `db` (String) Field usage depends on `service` value:
+ - Service `myosh`: Your Myosh Server variables/db.
+- `delimiter` (String) Field usage depends on `service` value:
+ - Service `aws_cost_report`: Optional. You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. If your files sync with the wrong number of columns, consider setting this value.
+ - Service `azure_blob_storage`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible.
+ - Service `box`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible.
+ - Service `dropbox`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible.
+ - Service `email`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible.
+ - Service `ftp`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible.
+ - Service `gcs`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible.
+ - Service `google_drive`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible.
+ - Service `kinesis`: Optional. You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible. If your files sync with the wrong number of columns, consider setting this value.
+ - Service `s3`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible.
+ - Service `sftp`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible.
+ - Service `share_point`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible.
+ - Service `wasabi_cloud_storage`: You can specify the delimiter that your CSVs use here. Fivetran generally tries to infer the delimiter, but in some cases this is impossible.
+- `developer_reference` (String) Field usage depends on `service` value:
+ - Service `brightpearl`: Your Brightpearl developer reference.
+- `dimension_attributes` (Set of String) Field usage depends on `service` value:
+ - Service `double_click_publishers`: Dimension attributes provide additional fields associated with a Dimension. Dimension attributes can only be selected with their corresponding Dimensions.
+- `dimensions` (Set of String) Field usage depends on `service` value:
+ - Service `adroll`: The dimensions that you want to sync.
+ - Service `double_click_campaign_manager`: Report dimensions to include into a sync. The `date` dimension is mandatory for all the report types. The `advertiser` dimension is mandatory for the `REACH` report type.
+ - Service `double_click_publishers`: Report dimensions to include in the sync. The `date` dimension is mandatory for all the report types.
+ - Service `google_analytics`: The report dimensions to include into a sync. The `date` dimension is mandatory for all the report types.
+ - Service `google_display_and_video_360`: The report dimensions (filters) to include into a sync. The dimension names are provided in the API format. This is a required parameter when `config_method` is set to `CREATE_NEW`.
+- `direct_capture_method` (String) Field usage depends on `service` value:
+ - Service `oracle_hva`: Possible values:`DIRECT`, `BFILE`, `ASM`, `ARCHIVE_ONLY`
+ - Service `oracle_sap_hva`: Possible values:`DIRECT`, `BFILE`, `ASM`, `ARCHIVE_ONLY`
+- `distributed_connector_cluster_size` (Number) Field usage depends on `service` value:
+ - Service `cosmos`: Specifies the total number of connectors in the Distributed Connector Cluster running in parallel.
+ - Service `dynamodb`: Specifies the total number of connectors in the Distributed Connector Cluster running in parallel.
+- `domain` (String) Field usage depends on `service` value:
+ - Service `auth0`: Your Auth0 domain.
+ - Service `bubble`: Your Bubble app name or domain name.
+ - Service `confluence`: Your Confluence domain.
+ - Service `kustomer`: Domain is the beginning of your kustomer URL going before .kustomerapp.com, e.g. for yourcompany.kustomerapp.com the domain name is yourcompany
+ - Service `okta`: Your Okta domain.
+ - Service `pipedrive`: Your Pipedrive domain.
+ - Service `shopware`: Your Shopware domain.
+ - Service `sistrix`: Your SISTRIX domain.
+ - Service `solarwinds_service_desk`: Your SolarWinds Service Desk domain.
+ - Service `uservoice`: Domain of your UserVoice site. If it ends with ".uservoice.com", you can specify just the subdomain ("mydomain.uservoice.com" → "mydomain")
+ - Service `zendesk`: Zendesk domain.
+ - Service `zendesk_sunshine`: Zendesk domain.
+- `domain_host_name` (String) Field usage depends on `service` value:
+ - Service `workday`: Workday host name.
+ - Service `workday_financial_management`: Workday host name.
+ - Service `workday_hcm`: Workday host name.
+- `domain_name` (String) Field usage depends on `service` value:
+ - Service `calabrio`: Your Calabrio domain name
+ - Service `dynamics_365`: The custom domain name associated with Dynamics 365.
+ - Service `helpshift`: Your Helpshift domain name.
+- `domain_type` (String) Field usage depends on `service` value:
+ - Service `medallia`: Domain type of your Medallia URL
+- `dsv_service_auth` (String, Sensitive)
+- `dsv_subscription_key` (String, Sensitive)
+- `ecommerce_stores` (Set of String) Field usage depends on `service` value:
+ - Service `mailchimp`: List of IDs of the Mailchimp E-Commerce Stores to Sync
+- `elements` (Set of String)
+- `email` (String) Field usage depends on `service` value:
+ - Service `appcues`: Your Appcues Email.
+ - Service `boostr`: Your Boostr email.
+ - Service `copper`: Your Copper email address.
+ - Service `email`: Send your emails to this address.
+ - Service `moloco`: Your Moloco account email.
+ - Service `netsuite_suiteanalytics`: The NetSuite user's email address.
+ - Service `pardot`: The email of the Pardot user.
+ - Service `skuvault`: Your SkuVault email.
+ - Service `smadex`: Your Smadex account's email ID.
+ - Service `zendesk`: Zendesk email.
+ - Service `zendesk_sunshine`: Zendesk email.
+- `email_id` (String) Field usage depends on `service` value:
+ - Service `ordway`: Your Ordway user email ID.
+ - Service `planful`: Your Planful email ID.
+- `empty_header` (Boolean) Field usage depends on `service` value:
+ - Service `aws_cost_report`: Optional. If your CSV generating software doesn't provide header line for the documents, Fivetran can generate the generic column names and sync data rows with them.
+ - Service `azure_blob_storage`: Optional. If your CSV generating software doesn't provide header line for the documents, Fivetran can generate the generic column names and sync data rows with them.
+ - Service `box`: Optional. If your CSV generating software doesn't provide header line for the documents, Fivetran can generate the generic column names and sync data rows with them.
+ - Service `dropbox`: Optional. If your CSV generating software doesn't provide header line for the documents, Fivetran can generate the generic column names and sync data rows with them.
+ - Service `email`: Optional. If your CSV generating software doesn't provide header line for the documents, Fivetran can generate the generic column names and sync data rows with them.
+ - Service `ftp`: Optional. If your CSV generating software doesn't provide header line for the documents, Fivetran can generate the generic column names and sync data rows with them.
+ - Service `gcs`: Optional. If your CSV generating software doesn't provide header line for the documents, Fivetran can generate the generic column names and sync data rows with them.
+ - Service `google_drive`: Optional. If your CSV generating software doesn't provide header line for the documents, Fivetran can generate the generic column names and sync data rows with them.
+ - Service `kinesis`: Optional. If your CSV generating software doesn't provide header line for the documents, Fivetran can generate the generic column names and sync data rows with them.
+ - Service `s3`: If your CSVs are headerless, set this as `true`. When `true`, we will generate generic column names following the convention of `column_0`, `column_1`, ... `column_n` to map the rows. Default value: `false`.
+ - Service `sftp`: Optional. If your CSV generating software doesn't provide header line for the documents, Fivetran can generate the generic column names and sync data rows with them.
+ - Service `share_point`: Optional. If your CSV generating software doesn't provide header line for the documents, Fivetran can generate the generic column names and sync data rows with them.
+ - Service `wasabi_cloud_storage`: If your CSVs are headerless, set this as `true`. When `true`, we will generate generic column names following the convention of `column_0`, `column_1`, ... `column_n` to map the rows. Default value: `false`.
+- `enable_all_dimension_combinations` (Boolean) Field usage depends on `service` value:
+ - Service `double_click_campaign_manager`: Whether to enable all reach dimension combinations in the report. Default value: `false`
+- `enable_archive_log_only` (Boolean) Field usage depends on `service` value:
+ - Service `sql_server_hva`: Use archive log only mode
+ - Service `sql_server_sap_ecc_hva`: Use archive log only mode
+- `enable_data_extensions_syncing` (Boolean)
+- `enable_distributed_connector_mode` (Boolean) Field usage depends on `service` value:
+ - Service `cosmos`: Enable to allow the connector to join a cluster of connectors forming a Distributed Connector Cluster. This cluster allows parallel syncs from the same source to the same destination using multiple connectors.
+ - Service `dynamodb`: Enable to allow the connector to join a cluster of connectors forming a Distributed Connector Cluster. This cluster allows parallel syncs from the same source to the same destination using multiple connectors.
+- `enable_enrichments` (Boolean) Field usage depends on `service` value:
+ - Service `snowplow`: Enable Enrichments
+- `enable_exports` (Boolean) Field usage depends on `service` value:
+ - Service `braze`: Enable User Profile Exports
+- `enable_tde` (Boolean) Field usage depends on `service` value:
+ - Service `sql_server_hva`: Using Transparent Data Encryption (TDE)
+ - Service `sql_server_sap_ecc_hva`: Using Transparent Data Encryption (TDE)
+- `encoded_public_key` (String) Field usage depends on `service` value:
+ - Service `apple_search_ads`: Use the public key to grant Fivetran access to the Apple Search Ads API.
+- `encryption_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `marketo`: Marketo SOAP API Encryption Key.
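+
+For the CSV parsing flags above, a short, hypothetical sketch of a headerless, semicolon-delimited S3 source. `delimiter` and `empty_header` come from this reference; the rest of the layout is assumed.
+
+```hcl
+# Hypothetical sketch: with empty_header = true, columns arrive as
+# column_0, column_1, ... column_n.
+resource "fivetran_connector" "s3_headerless_csv" {
+  group_id = "my_group_id" # placeholder
+  service  = "s3"
+
+  destination_schema {
+    name = "s3_headerless_csv" # assumed attribute
+  }
+
+  config {
+    bucket       = "my-example-bucket" # assumed field, placeholder value
+    delimiter    = ";"                 # set when inference fails
+    empty_header = true                # headerless CSVs
+  }
+}
+```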
+- `endpoint` (String) Field usage depends on `service` value: + - Service `branch`: Webhook Endpoint + - Service `iterable`: Register the following URL for webhooks on your Iterable dashboard. + - Service `marketo`: Marketo REST API endpoint. + - Service `snowplow`: Connection-specific collector endpoint. The collector endpoint will have the `webhooks.fivetran.com/snowplow/endpoint_ID` format. You will need it to configure Snowplow to connect with Fivetran. + - Service `webhooks`: You can send your events to https://webhooks.fivetran.com/webhooks/{endpoint} +- `engagement_attribution_window` (String) Field usage depends on `service` value: + - Service `pinterest_ads`: The number of days to use as the conversion attribution window for an engagement (i.e. closeup or save) action. +- `enriched_export` (String) Field usage depends on `service` value: + - Service `optimizely`: Enriched Events S3 bucket +- `entity_id` (String) Field usage depends on `service` value: + - Service `checkout`: Your Checkout.com entity ID. + - Service `zuora`: If `is_multi_entity_feature_enabled` is `true`, then it's `EntityId`. + - Service `zuora_sandbox`: If `is_multi_entity_feature_enabled` is `true`, then it's `EntityId`. +- `environment` (String) Field usage depends on `service` value: + - Service `bazaarvoice`: Your Bazaarvoice Environment. + - Service `buildium`: Your Buildium environment. + - Service `checkout`: Your Checkout.com environment. + - Service `concord`: Your Concord environment. + - Service `invoiced`: Your Invoiced environment. + - Service `procore`: Your Procore account environment. + - Service `reltio`: Your Reltio environment. + - Service `servicetitan`: Your ServiceTitan environment. + - Service `smarthr`: Your SmartHR environment. + - Service `trelica`: Your Trelica environment. + - Service `vts`: Your VTS environment. + - Service `younium`: Your Younium API environment. + - Service `zuora`: Zuora Sandbox Environment. This accepts either of the two values Sandbox or Central Sandbox based on your subscription. The default environment is Sandbox. + - Service `zuora_sandbox`: Zuora Sandbox Environment. This accepts either of the two values Sandbox or Central Sandbox based on your subscription. The default environment is Sandbox. +- `environment_name` (String) Field usage depends on `service` value: + - Service `business_central`: Name of the environment +- `escape_char` (String) Field usage depends on `service` value: + - Service `aws_cost_report`: Optional. If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. + - Service `azure_blob_storage`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. + - Service `box`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. + - Service `dropbox`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. + - Service `email`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. + - Service `ftp`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. + - Service `gcs`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here. 
+ - Service `google_drive`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here.
+ - Service `kinesis`: Optional. If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here.
+ - Service `s3`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here.
+ - Service `sftp`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here.
+ - Service `share_point`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here.
+ - Service `wasabi_cloud_storage`: If your CSV generator follows non-standard rules for escaping quotation marks, you can set the escape character here.
+- `escape_char_options` (String) Field usage depends on `service` value:
+ - Service `gcs`: Approach used by CSV parser. Default value: `CUSTOM_ESCAPE_CHAR`. Required for CSV parsing when `non_standard_escape_char` is `true`.
+ - Service `s3`: Approach used by CSV parser. Default value: `CUSTOM_ESCAPE_CHAR`. Required for CSV parsing when `non_standard_escape_char` is `true`.
+- `eu_region` (Boolean) Field usage depends on `service` value:
+ - Service `kustomer`: Turn it on if your app is in the EU region.
+ - Service `survey_monkey`: The SurveyMonkey account region. Specify `true` if your account is hosted in the EU region. Default value is `false`.
+- `events` (Set of String) Field usage depends on `service` value:
+ - Service `iterable`: List of events to sync. Should be specified when `sync_mode` is `SelectedEvents`.
+- `export_storage_type` (String) Field usage depends on `service` value:
+ - Service `adjust`: Your cloud storage.
+ - Service `braze`: Export Storage. Required if `enable_exports` is `true`.
+- `external_id` (String) Field usage depends on `service` value:
+ - Service `appsflyer`: The external ID is a string that designates who can assume the role. For more information, click [here](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html).
+ - Service `aws_cloudtrail`: This is the same as your `group_id`, used for authentication along with the `role_arn`.
+ - Service `aws_cost_report`: The external ID is a string that designates who can assume the role. For more information, click [here](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html).
+ - Service `aws_lambda`: The external ID is a string that designates who can assume the role.
+ - Service `aws_msk`: This is the same as your `group_id`. This is required when `sasl_mechanism` is set to `IAM`.
+ - Service `azure_function`: External ID.
+ - Service `cloudfront`: This is the same as your `group_id`, used for authentication along with the `role_arn`.
+ - Service `dynamodb`: This is the same as your `group_id`, used for authentication along with the `role_arn`.
+ - Service `kinesis`: The external ID is a string that designates who can assume the role. For more information, click [here](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html).
+ - Service `s3`: Used for authentication along with the `role_arn`. If not provided, it uses the connector's `group_id`. Use the [List All Groups endpoint](https://fivetran.com/docs/rest-api/groups#listallgroups) to find the `group_id`.
+ - Service `segment`: The external ID is a string that designates who can assume the role. For more information, see [Amazon's AWS Identity and Access Management User Guide](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html).
+- `facility_codes` (String) Field usage depends on `service` value:
+ - Service `unicommerce`: Your Uniware facility codes.
+- `fields` (Set of String) Field usage depends on `service` value:
+ - Service `facebook`: List of fields that the connector will sync. [Possible field values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#fields).
+- `file_type` (String) Field usage depends on `service` value:
+ - Service `aws_cost_report`: If your files are saved with improper extensions, you can force them to be synced as the selected file type. Leave the value as `infer` if your files have the correct extensions.
+ - Service `azure_blob_storage`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
+ - Service `box`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
+ - Service `dropbox`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
+ - Service `email`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
+ - Service `ftp`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
+ - Service `gcs`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
+ - Service `google_drive`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
+ - Service `kinesis`: If your files are saved with improper extensions, you can force them to be synced as the selected file type. Leave the value as `infer` if your files have the correct extensions.
+ - Service `s3`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
+ - Service `sftp`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
+ - Service `share_point`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
+ - Service `wasabi_cloud_storage`: If your files are saved with improper extensions, you can force them to be synced as the selected file type.
+- `filter` (String) Field usage depends on `service` value:
+ - Service `google_analytics`: String parameter restricts the data returned for your report. To use the filter parameter, specify a dimension or metric on which to filter, followed by the filter expression.
+- `finance_account_sync_mode` (String) Field usage depends on `service` value:
+ - Service `itunes_connect`: Whether to sync all finance accounts or specific finance accounts.
+- `finance_accounts` (Set of String) Field usage depends on `service` value:
+ - Service `itunes_connect`: Specific finance accounts to sync. Must be populated if `finance_account_sync_mode` is set to `SpecificFinanceAccounts`.
+- `folder` (String) Field usage depends on `service` value:
+ - Service `dropbox`: Your Dropbox Folder URL.
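+
+The `external_id` behavior for `s3` described above pairs with `role_arn` (documented elsewhere in this reference). A hedged sketch; the resource layout and `bucket` field are assumptions, and the ARN is a placeholder.
+
+```hcl
+# Hypothetical sketch: IAM-role authentication for an S3 connector.
+# If external_id is omitted, the connector's group_id is used instead.
+resource "fivetran_connector" "s3_iam" {
+  group_id = "my_group_id" # placeholder
+  service  = "s3"
+
+  destination_schema {
+    name = "s3_iam" # assumed attribute
+  }
+
+  config {
+    bucket      = "my-example-bucket"                       # assumed field
+    role_arn    = "arn:aws:iam::123456789012:role/fivetran" # placeholder
+    external_id = "my_group_id"                             # defaults to group_id if unset
+  }
+}
+```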
+- `folder_id` (String) Field usage depends on `service` value:
+ - Service `box`: Folder URL.
+ - Service `google_drive`: Folder URL.
+- `folder_path` (String) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: Folder Path.
+ - Service `one_drive`: Your OneDrive folder URL.
+ - Service `oracle_business_intelligence_publisher`: The folder path to save data models and reports.
+- `forecast_id` (String) Field usage depends on `service` value:
+ - Service `clari`: Your Clari Forecast ID.
+- `ftp_host` (String) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: FTP host.
+- `ftp_password` (String, Sensitive) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: FTP password.
+- `ftp_port` (Number) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: FTP port.
+- `ftp_user` (String) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: FTP user.
+- `function` (String) Field usage depends on `service` value:
+ - Service `aws_lambda`: The name of your AWS Lambda Function.
+- `function_app` (String) Field usage depends on `service` value:
+ - Service `azure_function`: Function app name in Azure portal.
+- `function_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `azure_function`: Function key used for authorization.
+- `function_name` (String) Field usage depends on `service` value:
+ - Service `azure_function`: Name of the function to be triggered.
+- `function_trigger` (String, Sensitive) Field usage depends on `service` value:
+ - Service `google_cloud_function`: The trigger URL of the cloud function.
+- `gcs_bucket` (String) Field usage depends on `service` value:
+ - Service `braze`: Your GCS bucket. Required if `GCS` is the `cloud_storage_type`.
+ - Service `webhooks`: The GCS bucket name. Required if `bucket_service` is set to `GCS`.
+- `gcs_export_bucket` (String) Field usage depends on `service` value:
+ - Service `braze`: Your GCS bucket. Required if `GCS` is the `export_storage_type`.
+- `gcs_export_folder` (String) Field usage depends on `service` value:
+ - Service `braze`: Your GCS folder name. Required if `GCS` is the `export_storage_type`.
+- `gcs_folder` (String) Field usage depends on `service` value:
+ - Service `braze`: Your GCS folder name. Required if `GCS` is the `cloud_storage_type`.
+- `generate_fivetran_pk` (Boolean) Field usage depends on `service` value:
+ - Service `workday`: Select this option to generate a Primary Key for reports where no single column or combination of columns can be used to form a Primary Key.
+- `group_name` (String) Field usage depends on `service` value:
+ - Service `fivetran_log`: (Optional) The group name of the `target_group_id`.
+- `hana_backup_password` (String, Sensitive)
+- `hana_mode` (String) Field usage depends on `service` value:
+ - Service `hana_sap_hva_b1`: The mode for connecting to HANA server. Available options: Single container (default), Multiple containers - Tenant database, Multiple containers - System database, Manual port selection - This option is used only if the database port needs to be specified manually.
+ - Service `hana_sap_hva_ecc`: The mode for connecting to HANA server. Available options: Single container (default), Multiple containers - Tenant database, Multiple containers - System database, Manual port selection - This option is used only if the database port needs to be specified manually.
+ - Service `hana_sap_hva_ecc_netweaver`: The mode for connecting to HANA server. Available options: Single container (default), Multiple containers - Tenant database, Multiple containers - System database, Manual port selection - This option is used only if the database port needs to be specified manually. + - Service `hana_sap_hva_s4`: The mode for connecting to HANA server. Available options: Single container (default), Multiple containers - Tenant database, Multiple containers - System database, Manual port selection - This option is used only if the database port needs to be specified manually. + - Service `hana_sap_hva_s4_netweaver`: The mode for connecting to HANA server. Available options: Single container (default), Multiple containers - Tenant database, Multiple containers - System database, Manual port selection - This option is used only if the database port needs to be specified manually. +- `has_manage_permissions` (Boolean) Field usage depends on `service` value: + - Service `azure_service_bus`: The boolean value specifying whether the connection string has manage permissions +- `historic_sync_time_frame` (String) Field usage depends on `service` value: + - Service `klaviyo`: Range of data in history you would like to include in the initial sync. Default value: `ALL_TIME`. + - Service `marketo`: Range of data in history you would like to include in the initial sync. Default value: `ALL_TIME`. + - Service `salesforce_marketing_cloud`: Range of data in history you would like to include in the initial sync. Default value: `ALL_TIME`. +- `historical_sync_limit` (String) Field usage depends on `service` value: + - Service `pardot`: The time range for which historical data should be synced. Default value: `All Time`. +- `home_folder` (String) Field usage depends on `service` value: + - Service `appsflyer`: Your S3 home folder path of the Data Locker. +- `host` (String) Field usage depends on `service` value: + - Service `aurora`: DB instance host or IP address. + - Service `aurora_postgres`: DB instance host or IP address. + - Service `azure_postgres`: DB instance host or IP address. + - Service `azure_sql_db`: DB instance host or IP address. + - Service `azure_sql_managed_db`: DB instance host or IP address. + - Service `clarity`: DB instance host or IP address. + - Service `cockroachdb`: DB instance host or IP address. + - Service `commercetools`: Your commercetools host. + - Service `databricks_db`: The host URL for your Databricks account. + - Service `db2i_hva`: A host address of the primary node. It should be a DB instance host/IP address with a port number. + - Service `db2i_sap_hva`: DB instance host or IP address. + - Service `documentdb`: Host IP address of the primary node. Ignored if `hosts` value is provided. + - Service `dynamics_365_fo`: DB instance host or IP address. + - Service `ehr`: DB instance host or IP address. + - Service `elastic_cloud`: DB instance host or IP address. + - Service `es_self_hosted`: DB instance host or IP address. + - Service `ftp`: FTP host address. + - Service `google_cloud_mysql`: DB instance host or IP address. + - Service `google_cloud_postgresql`: DB instance host or IP address. + - Service `google_cloud_sqlserver`: DB instance host or IP address. + - Service `hana_sap_hva_b1`: DB instance host or IP address. + - Service `hana_sap_hva_ecc`: DB instance host or IP address. + - Service `hana_sap_hva_ecc_netweaver`: DB instance host or IP address. + - Service `hana_sap_hva_s4`: DB instance host or IP address. 
+ - Service `hana_sap_hva_s4_netweaver`: DB instance host or IP address. + - Service `heroku_postgres`: DB instance host or IP address. + - Service `jira`: The Jira service host address. + - Service `magento_mysql`: DB instance host or IP address. + - Service `magento_mysql_rds`: DB instance host or IP address. + - Service `maria`: DB instance host or IP address. + - Service `maria_azure`: DB instance host or IP address. + - Service `maria_rds`: DB instance host or IP address. + - Service `marin`: The Marin host address. + - Service `mysql`: DB instance host or IP address. + - Service `mysql_azure`: DB instance host or IP address. + - Service `mysql_rds`: DB instance host or IP address. + - Service `netsuite_suiteanalytics`: The NetSuite service host address. + - Service `opendistro`: DB instance host or IP address. + - Service `opensearch`: DB instance host or IP address. + - Service `oracle`: DB instance host or IP address. + - Service `oracle_ebs`: DB instance host or IP address. + - Service `oracle_hva`: DB instance host or IP address. + - Service `oracle_rac`: DB instance host or IP address. + - Service `oracle_rds`: DB instance host or IP address. + - Service `oracle_sap_hva`: DB instance host or IP address. + - Service `oracle_sap_hva_netweaver`: DB instance host or IP address. + - Service `postgres`: DB instance host or IP address. + - Service `postgres_rds`: DB instance host or IP address. + - Service `redshift_db`: Host name + - Service `sap_hana`: The SAP HANA host or IP address. + - Service `sap_s4hana`: The SAP S/4 host or IP address. + - Service `sftp`: SFTP host address. + - Service `snowflake_db`: Host name + - Service `splunk`: The Splunk service host address. + - Service `sql_server`: DB instance host or IP address. + - Service `sql_server_hva`: DB instance host or IP address. + - Service `sql_server_rds`: DB instance host or IP address. + - Service `sql_server_sap_ecc_hva`: DB instance host or IP address. +- `host_ip` (String) Field usage depends on `service` value: + - Service `azure_blob_storage`: IP address of host tunnel machine which is used to connect to the Storage container. + - Service `azure_service_bus`: The IP address of the host machine which we use to connect to ASB via ssh +- `host_name` (String) Field usage depends on `service` value: + - Service `coassemble`: Your Coassemble Hostname. + - Service `datadog`: Your Datadog host name. +- `host_url` (String) Field usage depends on `service` value: + - Service `adobe_commerce`: Your Adobe Commerce host url. + - Service `sparkpost`: Your SparkPost host URL. +- `host_user` (String) Field usage depends on `service` value: + - Service `azure_blob_storage`: Username in the host machine. + - Service `azure_service_bus`: The username on the host machine which we use to connect to ASB via ssh +- `hostname` (String) Field usage depends on `service` value: + - Service `akamai`: Your Akamai hostname. + - Service `ukg_pro`: Your UKG Pro hostname. +- `hosts` (Set of String) Field usage depends on `service` value: + - Service `azure_cosmos_for_mongo`: A list of host addresses for Azure Cosmos DB for Mongo DB. + - Service `mongo`: A list of host addresses of the primary node and all replicas. Each list item is either: a DB instance host/IP address with a port number, or SRV host record. + - Service `mongo_sharded`: A list of host addresses of the primary node and all replicas. Each list item is either: a DB instance host/IP address with a port number, or SRV host record. 
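+
+The `hosts` set above takes one entry per node, each as a host/IP with a port number or an SRV host record. A minimal, hypothetical sketch for `mongo`; everything outside `config.hosts` is assumed.
+
+```hcl
+# Hypothetical sketch: listing the primary and one replica for `mongo`.
+resource "fivetran_connector" "mongo_replica_set" {
+  group_id = "my_group_id" # placeholder
+  service  = "mongo"
+
+  destination_schema {
+    name = "mongo_replica_set" # assumed attribute
+  }
+
+  config {
+    hosts = [
+      "mongo-primary.example.com:27017", # DB instance host with port number
+      "mongo-replica-1.example.com:27017",
+    ]
+  }
+}
+```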
+- `http_path` (String) Field usage depends on `service` value:
+ - Service `databricks_db`: http path
+- `identifier` (String) Field usage depends on `service` value:
+ - Service `playvox_workforce_management`: Your Playvox Workforce Management Identifier.
+ - Service `statistics_netherlands_cbs`: Your Statistics Netherlands CBS catalog identifier.
+- `identity` (String) Field usage depends on `service` value:
+ - Service `marketo`: Marketo REST API identity url.
+- `include_ocapi_endpoints` (Boolean) Field usage depends on `service` value:
+ - Service `salesforce_commerce_cloud`: Whether to sync data through OCAPI endpoints.
+- `instance` (String) Field usage depends on `service` value:
+ - Service `acumatica`: Your Acumatica instance name.
+ - Service `coupa`: The instance name of your Coupa account in the URL.
+ - Service `salesforce_marketing_cloud`: The Salesforce Marketing Cloud instance ID
+ - Service `servicenow`: ServiceNow Instance ID.
+- `instance_number` (String) Field usage depends on `service` value:
+ - Service `hana_sap_hva_b1`: Two-digit number (00-97) of the SAP instance within its host.
+ - Service `hana_sap_hva_ecc`: Two-digit number (00-97) of the SAP instance within its host.
+ - Service `hana_sap_hva_ecc_netweaver`: Two-digit number (00-97) of the SAP instance within its host.
+ - Service `hana_sap_hva_s4`: Two-digit number (00-97) of the SAP instance within its host.
+ - Service `hana_sap_hva_s4_netweaver`: Two-digit number (00-97) of the SAP instance within its host.
+ - Service `oracle_sap_hva_netweaver`: Two-digit number (00-97) of the SAP instance within its host.
+- `instance_url` (String) Field usage depends on `service` value:
+ - Service `sap_business_by_design`: The SAP Business ByDesign instance URL.
+- `integration_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `pendo`: The integration key of the Pendo account.
+- `is_account_level_connector` (Boolean) Field usage depends on `service` value:
+ - Service `fivetran_log`: (Optional) Retrieve account-level logs.
+- `is_auth2_enabled` (Boolean) Field usage depends on `service` value:
+ - Service `apple_search_ads`: Specifies whether OAuth 2.0 authentication is enabled. Default value: `false`
+- `is_custom_api_credentials` (Boolean) Field usage depends on `service` value:
+ - Service `twitter_ads`: Custom API credentials
+- `is_external_activities_endpoint_selected` (Boolean) Field usage depends on `service` value:
+ - Service `pardot`: Whether the `EXTERNAL_ACTIVITY` table must be synced or not.
+- `is_ftps` (Boolean) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: Use Secure FTP (FTPS).
+- `is_keypair` (Boolean) Field usage depends on `service` value:
+ - Service `sftp`: Whether to use a key pair for authentication. When `true`, do not use `password`.
+- `is_multi_entity_feature_enabled` (Boolean) Field usage depends on `service` value:
+ - Service `zuora`: Set to `true` if there are multiple entities in your Zuora account and you only want to use one entity. Otherwise, set to `false`.
+ - Service `zuora_sandbox`: Set to `true` if there are multiple entities in your Zuora account and you only want to use one entity. Otherwise, set to `false`.
+- `is_new_package` (Boolean) Field usage depends on `service` value:
+ - Service `salesforce_marketing_cloud`: Indicates that your installed package uses OAuth 2.0. Default value: `false`
+- `is_private_key_encrypted` (Boolean) Field usage depends on `service` value:
+ - Service `snowflake_db`: Indicates that a private key is encrypted. The default value: `false`. The field can be specified if authentication type is `KEY_PAIR`.
+- `is_private_link_required` (Boolean) Field usage depends on `service` value:
+ - Service `aws_lambda`: We use PrivateLink by default if your AWS Lambda is in the same region as Fivetran. Turning on this toggle ensures that Fivetran always connects to AWS lambda over PrivateLink. Learn more in our [PrivateLink documentation](https://fivetran.com/docs/connectors/databases/connection-options#awsprivatelink).
+- `is_public` (Boolean) Field usage depends on `service` value:
+ - Service `aws_cost_report`: Whether you are syncing from a public bucket. Default value: `false`.
+ - Service `kinesis`: Is the bucket public? (you don't need an AWS account for syncing public buckets!)
+ - Service `s3`: Whether you are syncing from a public bucket. Default value: `false`.
+- `is_sailthru_connect_enabled` (Boolean) Field usage depends on `service` value:
+ - Service `sailthru`: Enable this if you want to sync Sailthru Connect
+- `is_secure` (Boolean) Field usage depends on `service` value:
+ - Service `ftp`: Whether the server supports FTPS.
+- `is_sftp_creds_available` (Boolean) Field usage depends on `service` value:
+ - Service `salesforce_marketing_cloud`: Provide SFTP credentials
+- `is_single_table_mode` (Boolean) Field usage depends on `service` value:
+ - Service `box`: Allows the creation of a connector using the Merge Mode strategy.
+ - Service `dropbox`: Allows the creation of a connector using the Merge Mode strategy.
+ - Service `google_drive`: Allows the creation of a connector using the Merge Mode strategy.
+ - Service `sftp`: Allows the creation of a connector using the specified Sync strategy.
+ - Service `share_point`: Allows the creation of a connector using the Merge Mode strategy.
+- `is_vendor` (Boolean) Field usage depends on `service` value:
+ - Service `amazon_selling_partner`: Whether or not you have a Vendor Account. Default value: `false`.
+- `json_delivery_mode` (String) Field usage depends on `service` value:
+ - Service `aws_cost_report`: Control how your JSON data is delivered into your destination
+ - Service `azure_blob_storage`: Control how your JSON data is delivered into your destination
+ - Service `box`: Control how your JSON data is delivered into your destination
+ - Service `dropbox`: Control how your JSON data is delivered into your destination
+ - Service `email`: Control how your JSON data is delivered into your destination
+ - Service `ftp`: Control how your JSON data is delivered into your destination
+ - Service `gcs`: Control how your JSON data is delivered into your destination
+ - Service `google_drive`: Control how your JSON data is delivered into your destination
+ - Service `kinesis`: Control how your JSON data is delivered into your destination
+ - Service `s3`: Control how your JSON data is delivered into your destination
+ - Service `sftp`: Control how your JSON data is delivered into your destination
+ - Service `share_point`: Control how your JSON data is delivered into your destination
+ - Service `wasabi_cloud_storage`: Specifies how Fivetran should handle your JSON data. Default value: `Packed`.
+- `key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `uservoice`: The UserVoice API key.
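+
+As a hypothetical illustration of how the boolean flags above combine with other fields, a public S3 bucket sync might be configured roughly like this (placeholder values; the `bucket` field is documented elsewhere on this page):
+
+```hcl
+resource "fivetran_connector" "s3_public_example" {
+  group_id = "your_group_id" # placeholder
+  service  = "s3"
+
+  config {
+    bucket             = "my-public-bucket" # placeholder bucket name
+    is_public          = true               # syncing from a public bucket
+    json_delivery_mode = "Packed"           # how JSON data is delivered into the destination
+  }
+}
+```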
+- `key_password` (String, Sensitive) Field usage depends on `service` value:
+ - Service `aws_msk`: If `security_protocol` is set to `TLS`, enter your `Key Password`.
+- `key_store_type` (String) Field usage depends on `service` value:
+ - Service `heroku_kafka`: Key Store Type
+- `keystore` (String, Sensitive) Field usage depends on `service` value:
+ - Service `aws_msk`: If `security_protocol` is set to `TLS`, add the `Keystore File` as Base64 encoded string.
+- `keystore_password` (String, Sensitive) Field usage depends on `service` value:
+ - Service `aws_msk`: If `security_protocol` is set to `TLS`, enter your `Keystore Password`.
+- `legal_entity_id` (String) Field usage depends on `service` value:
+ - Service `younium`: Your Younium legal entity ID.
+- `limit_for_api_calls_to_external_activities_endpoint` (Number) Field usage depends on `service` value:
+ - Service `pardot`: API limit for the external activities endpoint.
+- `line_separator` (String) Field usage depends on `service` value:
+ - Service `aws_cost_report`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
+ - Service `azure_blob_storage`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
+ - Service `box`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
+ - Service `dropbox`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
+ - Service `email`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
+ - Service `ftp`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
+ - Service `gcs`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
+ - Service `google_drive`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
+ - Service `kinesis`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
+ - Service `s3`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
+ - Service `sftp`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
+ - Service `share_point`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
+ - Service `wasabi_cloud_storage`: You can specify the custom line separator for your CSV files. The line separator is used in files to separate one row from the next.
+- `list_of_company_ids` (String) Field usage depends on `service` value:
+ - Service `cj_commission_detail`: Your CJ Commission Detail list of company IDs.
+- `list_strategy` (String) Field usage depends on `service` value:
+ - Service `aws_cost_report`: Optional. If you have a file structure where new files are always named in lexicographically increasing order such as files being named in increasing order of time, you can select `time_based_pattern_listing`.
+ - Service `kinesis`: Optional. If you have a file structure where new files are always named in lexicographically increasing order such as files being named in increasing order of time, you can select `time_based_pattern_listing`.
+ - Service `s3`: The listing strategy you want to use. Default value: `complete_listing`.
+- `list_sync_mode` (String) Field usage depends on `service` value:
+ - Service `google_analytics_4_export`: The Sync Mode
+- `log_journal` (String) Field usage depends on `service` value:
+ - Service `db2i_hva`: The log journal name.
+ - Service `db2i_sap_hva`: The log journal name.
+- `log_journal_schema` (String) Field usage depends on `service` value:
+ - Service `db2i_hva`: The log journal schema.
+ - Service `db2i_sap_hva`: The log journal schema.
+- `log_on_group` (String) Field usage depends on `service` value:
+ - Service `hana_sap_hva_ecc_netweaver`: Name of the SAP logon group. The default value is PUBLIC. This field is optional.
+ - Service `hana_sap_hva_s4_netweaver`: Name of the SAP logon group. The default value is PUBLIC. This field is optional.
+- `login` (String) Field usage depends on `service` value:
+ - Service `rebound_returns`: Your ReBound Returns login.
+ - Service `the_trade_desk`: The Trade Desk email. It is a part of the login credentials.
+ - Service `walmart_dsp`: Walmart DSP email. It is a part of the login credentials.
+- `login_password` (String, Sensitive) Field usage depends on `service` value:
+ - Service `concur`: The SAP Concur password.
+ - Service `sage_intacct`: The login password. It is a part of the login credentials.
+- `manager_accounts` (Set of String) Field usage depends on `service` value:
+ - Service `google_ads`: The list of the Manager Account IDs whose clients will be synced. Must be populated if `sync_mode` is set to `ManagerAccounts`.
+- `max_api_requests_per_day` (Number) Field usage depends on `service` value:
+ - Service `reltio`: Maximum API requests per day
+- `merchant_id` (String) Field usage depends on `service` value:
+ - Service `afterpay`: Your Afterpay Merchant ID.
+ - Service `amazon_selling_partner`: The Merchant ID or Vendor Code.
+ - Service `avantlink`: Your AvantLink Merchant ID.
+ - Service `braintree`: Your Braintree merchant ID.
+ - Service `braintree_sandbox`: Your Braintree merchant ID.
+ - Service `xsolla`: Your Xsolla Merchant ID.
+- `message_type` (String) Field usage depends on `service` value:
+ - Service `apache_kafka`: Kafka message type.
+ - Service `aws_msk`: The Message type.
+ - Service `azure_event_hub`: Message type.
+ - Service `azure_service_bus`: The format of messages in the topic
+ - Service `confluent_cloud`: Confluent Cloud message type.
+ - Service `heroku_kafka`: Heroku Kafka message type.
+- `metrics` (Set of String) Field usage depends on `service` value:
+ - Service `adroll`: The metrics that you want to sync.
+ - Service `criteo`: Metrics
+ - Service `double_click_campaign_manager`: Report metrics to include into a sync.
+ - Service `google_analytics`: The report metrics to include into a sync.
+ - Service `google_display_and_video_360`: The report metrics to include into a sync. The metric names are provided in the API format. This is a required parameter when `config_method` is set to `CREATE_NEW`.
+- `named_range` (String) Field usage depends on `service` value:
+ - Service `google_sheets`: The name of the named data range on the sheet that contains the data to be synced.
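+
+For example, the `manager_accounts` entry above notes that it must be populated when `sync_mode` (documented elsewhere on this page) is set to `ManagerAccounts`; a hypothetical sketch with placeholder IDs:
+
+```hcl
+resource "fivetran_connector" "google_ads_example" {
+  group_id = "your_group_id" # placeholder
+  service  = "google_ads"
+
+  config {
+    sync_mode        = "ManagerAccounts"       # sync the clients of the listed manager accounts
+    manager_accounts = ["1234567890"]          # placeholder Manager Account ID
+  }
+}
+```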
+- `namespace` (String) Field usage depends on `service` value: + - Service `azure_service_bus`: The ASB namespace which we have to sync. Required for `AzureActiveDirectory` authentication. +- `network_code` (Number) Field usage depends on `service` value: + - Service `double_click_publishers`: Network code is a unique, numeric identifier for your Ad Manager network. +- `non_standard_escape_char` (Boolean) Field usage depends on `service` value: + - Service `gcs`: Use this if your CSV generator uses non-standard ways of escaping characters. Default value: `false`. + - Service `s3`: Use this if your CSV generator uses non-standard ways of escaping characters. Default value: `false`. +- `null_sequence` (String) Field usage depends on `service` value: + - Service `aws_cost_report`: Optional. If your CSVs use a special value indicating null, you can specify it here. + - Service `azure_blob_storage`: If your CSVs use a special value indicating null, you can specify it here. + - Service `box`: If your CSVs use a special value indicating null, you can specify it here. + - Service `dropbox`: If your CSVs use a special value indicating null, you can specify it here. + - Service `email`: If your CSVs use a special value indicating null, you can specify it here. + - Service `ftp`: If your CSVs use a special value indicating null, you can specify it here. + - Service `gcs`: If your CSVs use a special value indicating null, you can specify it here. + - Service `google_drive`: If your CSVs use a special value indicating null, you can specify it here. + - Service `kinesis`: Optional. If your CSVs use a special value indicating null, you can specify it here. + - Service `s3`: If your CSVs use a special value indicating null, you can specify it here. + - Service `sftp`: If your CSVs use a special value indicating null, you can specify it here. + - Service `share_point`: If your CSVs use a special value indicating null, you can specify it here. + - Service `wasabi_cloud_storage`: If your CSVs use a special value indicating null, you can specify it here. +- `oauth_token` (String, Sensitive) +- `oauth_token_secret` (String, Sensitive) +- `ocapi_client_id` (String) Field usage depends on `service` value: + - Service `salesforce_commerce_cloud`: The Salesforce Commerce Cloud OCAPI Client ID. +- `ocapi_client_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `salesforce_commerce_cloud`: The Salesforce Commerce Cloud OCAPI Client secret. +- `ocapi_custom_object_types` (String) Field usage depends on `service` value: + - Service `salesforce_commerce_cloud`: The Salesforce Commerce Cloud OCAPI custom object types. +- `ocapi_hostname` (String) Field usage depends on `service` value: + - Service `salesforce_commerce_cloud`: The Salesforce Commerce Cloud OCAPI hostname. +- `odbc_driver_manager_library_path` (String) Field usage depends on `service` value: + - Service `hana_sap_hva_b1`: ODBC manager library path + - Service `hana_sap_hva_ecc`: ODBC manager library path + - Service `hana_sap_hva_s4`: ODBC manager library path +- `odbc_sys_ini_path` (String) Field usage depends on `service` value: + - Service `hana_sap_hva_b1`: odbc.ini and odbcsinst.ini location + - Service `hana_sap_hva_ecc`: odbc.ini and odbcsinst.ini location + - Service `hana_sap_hva_s4`: odbc.ini and odbcsinst.ini location +- `on_error` (String) Field usage depends on `service` value: + - Service `aws_cost_report`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. 
We recommend leaving the value as fail unless you are certain that you have undesirable, malformed data. + - Service `azure_blob_storage`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as fail unless you are certain that you have undesirable, malformed data. + - Service `box`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as fail unless you are certain that you have undesirable, malformed data. + - Service `dropbox`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as fail unless you are certain that you have undesirable, malformed data. + - Service `ftp`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as fail unless you are certain that you have undesirable, malformed data. + - Service `gcs`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as fail unless you are certain that you have undesirable, malformed data. + - Service `google_drive`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as fail unless you are certain that you have undesirable, malformed data. + - Service `kinesis`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as fail unless you are certain that you have undesirable, malformed data. + - Service `s3`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data. + - Service `sftp`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as fail unless you are certain that you have undesirable, malformed data. + - Service `share_point`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as fail unless you are certain that you have undesirable, malformed data. + - Service `wasabi_cloud_storage`: If you know that your files contain some errors, you can choose to have poorly formatted lines skipped. We recommend leaving the value as `fail` unless you are certain that you have undesirable, malformed data. +- `on_premise` (Boolean) Field usage depends on `service` value: + - Service `jira`: Whether the Jira instance is local or in cloud. +- `organization` (String) Field usage depends on `service` value: + - Service `gladly`: Your Gladly Organization Name. + - Service `statuspage`: Your Statuspage Organization ID. +- `organization_domain` (String) Field usage depends on `service` value: + - Service `adobe_workfront`: Your Adobe Workfront organization domain. +- `organization_id` (String) Field usage depends on `service` value: + - Service `adobe_analytics`: Organization ID from the Service Account (JWT) credentials of your Adobe Project. + - Service `integrate`: Your Integrate organization ID. + - Service `megaphone`: Your Megaphone organization ID. + - Service `productive`: Your Productive Organization ID. 
+ - Service `salesforce_commerce_cloud`: The organization ID from Salesforce Commerce Cloud account. + - Service `zoho_books`: Your Zoho Books Organization ID. + - Service `zoho_inventory`: Your Zoho Inventory organization ID. +- `organization_name` (String) Field usage depends on `service` value: + - Service `brightpearl`: Your Brightpearl organization name. + - Service `confluence`: Your Confluence organization name. +- `organizations` (Set of String) Field usage depends on `service` value: + - Service `apple_search_ads`: Organizations + - Service `snapchat_ads`: Specific organizations IDs to sync. Must be populated if `syncMode` is set to `SpecificOrganizations`. +- `packed_mode_tables` (Set of String) Field usage depends on `service` value: + - Service `azure_cosmos_for_mongo`: List of tables to be synced in packed mode; format:`db.table`(case-sensitive). + - Service `dynamodb`: List of tables to be synced in packed mode. + - Service `firebase`: Specific tables to sync. Must be populated if `packing_mode` is set to `SelectTablesForPackedMode`. + - Service `mongo`: List of tables to be synced in packed mode; format:`db.table`(case-sensitive). + - Service `mongo_sharded`: List of tables to be synced in packed mode; format:`db.table`(case-sensitive). +- `packing_mode` (String) Field usage depends on `service` value: + - Service `azure_cosmos_for_mongo`: Indicates the desired sync pack mode. Accepted values are `UsePackedModeOnly` and `UseUnpackedModeOnly`. `SelectTablesForPackedMode` is deprecated. + - Service `firebase`: Whether to sync all tables in unpacked mode or specific tables in packed mode. Default value: `UseUnpackedModeOnly`. + - Service `klaviyo`: Packing mode for EVENT and PERSON tables. + - Service `mongo`: Whether to sync all tables in unpacked mode only, all tables in packed mode only, or specific tables in packed mode. Default value: `UseUnpackedModeOnly`. + - Service `mongo_sharded`: Whether to sync all tables in unpacked mode only, all tables in packed mode only, or specific tables in packed mode. Default value: `UseUnpackedModeOnly`. + - Service `optimizely`: Packing mode for conversion and decision tables. + - Service `sailthru`: Packing mode for LIST_STATE and USER tables. +- `pages` (Set of String) Field usage depends on `service` value: + - Service `facebook_pages`: Specific pages to sync. Must be populated if `sync_mode` is set to `SpecificPages`. +- `partner_code` (String) Field usage depends on `service` value: + - Service `care_quality_commission`: Your Care Quality Commission partner code. +- `partner_user_id` (String) Field usage depends on `service` value: + - Service `expensify`: Your Expensify partnerUserID. +- `partner_user_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `expensify`: Your Expensify partnerUserSecret. +- `partners` (Set of String) Field usage depends on `service` value: + - Service `google_display_and_video_360`: The list of partners to include into a sync. This parameter only takes effect when `config_method` is set to `CREATE_NEW`. + - Service `the_trade_desk`: Specific Partner IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`. + - Service `walmart_dsp`: Specific Partner IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`. 
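+
+To illustrate the relationship between `packing_mode` and `packed_mode_tables` described above, here is a hypothetical `firebase` sketch with placeholder table names:
+
+```hcl
+resource "fivetran_connector" "firebase_example" {
+  group_id = "your_group_id" # placeholder
+  service  = "firebase"
+
+  config {
+    packing_mode       = "SelectTablesForPackedMode" # sync only specific tables in packed mode
+    packed_mode_tables = ["users", "events"]         # placeholder tables; required for this packing_mode
+  }
+}
+```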
+- `pass_phrase` (String, Sensitive) Field usage depends on `service` value: + - Service `qualtrics`: Pass Phrase +- `passphrase` (String, Sensitive) Field usage depends on `service` value: + - Service `snowflake_db`: In case private key is encrypted, you are required to enter passphrase that was used to encrypt the private key. The field can be specified if authentication type is `KEY_PAIR`. +- `password` (String, Sensitive) Field usage depends on `service` value: + - Service `absorb_lms`: Your Absorb LMS password. + - Service `adobe_commerce`: Your Adobe Commerce password. + - Service `anaplan`: Your Anaplan password. Must be populated if `auth_mode` is set to `Basic`. + - Service `appfigures`: Your Appfigures Password. + - Service `aurora`: The user's password. + - Service `aurora_postgres`: The user's password. + - Service `azure_cosmos_for_mongo`: Password used for source database authentication. + - Service `azure_postgres`: The user's password. + - Service `azure_sql_db`: The user's password. + - Service `azure_sql_managed_db`: The user's password. + - Service `boostr`: Your Boostr password. + - Service `ceridian_dayforce`: Your Ceridian Dayforce Password. + - Service `cin7`: Your Cin7 API Key. + - Service `clarity`: The user's password. + - Service `cockroachdb`: The user's password. + - Service `collibra`: Your collibra password. + - Service `contrast_security`: Your Contrast Security API Password. + - Service `db2i_hva`: The user's password. + - Service `db2i_sap_hva`: The user's password. + - Service `documentdb`: The user's password. + - Service `dynamics_365_fo`: The user's password. + - Service `ehr`: The user's password. + - Service `elastic_cloud`: The user's password. + - Service `es_self_hosted`: The user's password. + - Service `ftp`: FTP password. + - Service `globalmeet`: Your GlobalMeet Password. + - Service `google_cloud_mysql`: The user's password. + - Service `google_cloud_postgresql`: The user's password. + - Service `google_cloud_sqlserver`: The user's password. + - Service `green_power_monitor`: Your GreenPowerMonitor password. + - Service `guru`: Your Guru password. + - Service `hana_sap_hva_b1`: The user's password. + - Service `hana_sap_hva_ecc`: The user's password. + - Service `hana_sap_hva_ecc_netweaver`: The user's password. + - Service `hana_sap_hva_s4`: The user's password. + - Service `hana_sap_hva_s4_netweaver`: The user's password. + - Service `heroku_postgres`: The user's password. + - Service `impact`: Your Impact Account Token + - Service `integral_ad_science`: Your integral_ad_science password. + - Service `itunes_connect`: Your password + - Service `jamf`: Your Jamf password. + - Service `jira`: The Jira user's password. + - Service `khoros_care`: Your Khoros Care password. + - Service `kissmetrics`: Your Kissmetrics API Password. + - Service `klarna`: Your Klarna Password. + - Service `lessonly`: Your Lessonly password. + - Service `magento_mysql`: The user's password. + - Service `magento_mysql_rds`: The user's password. + - Service `maria`: The user's password. + - Service `maria_azure`: The user's password. + - Service `maria_rds`: The user's password. + - Service `marin`: The Marin user's password. + - Service `moloco`: Your Moloco account password. + - Service `mongo`: The user's password. + - Service `mongo_sharded`: The user's password. + - Service `myosh`: Your myosh password. + - Service `mysql`: The user's password. + - Service `mysql_azure`: The user's password. + - Service `mysql_rds`: The user's password. 
+ - Service `netsuite_suiteanalytics`: The NetSuite user's password. + - Service `opendistro`: The user's password. + - Service `opensearch`: The user's password. + - Service `oracle`: The user's password. + - Service `oracle_business_intelligence_publisher`: The Oracle Business Intelligence user password. + - Service `oracle_ebs`: The user's password. + - Service `oracle_fusion_cloud_apps_crm`: The Oracle Fusion Cloud user password. + - Service `oracle_fusion_cloud_apps_fscm`: The Oracle Fusion Cloud user password. + - Service `oracle_fusion_cloud_apps_hcm`: The Oracle Fusion Cloud user password. + - Service `oracle_hva`: The user's password. + - Service `oracle_rac`: The user's password. + - Service `oracle_rds`: The user's password. + - Service `oracle_sap_hva`: The user's password. + - Service `oracle_sap_hva_netweaver`: The user's password. + - Service `outbrain`: The Outbrain user's password. + - Service `pardot`: The Pardot user's password. + - Service `partnerize`: Your Partnerize account's password. + - Service `podio`: Your Podio password. + - Service `postgres`: The user's password. + - Service `postgres_rds`: The user's password. + - Service `redshift_db`: The Redshift user's password. + - Service `revx`: Your RevX Password. + - Service `rtb_house`: Your RTB House password. + - Service `sap_business_by_design`: The SAP Business ByDesign password. + - Service `sap_hana`: Your SAP HANA password. + - Service `sap_s4hana`: Your SAP S/4 password. + - Service `scorm`: Your Scorm Secret Key. + - Service `servicenow`: Your account password. + - Service `sftp`: SFTP password. + - Service `shiphero`: Your ShipHero password. + - Service `shipstation`: Your ShipStation password. + - Service `shopware`: Your Shopware password. + - Service `skuvault`: Your SkuVault password. + - Service `smadex`: Your Smadex Password. + - Service `snowflake_db`: The Snowflake user's password. + - Service `splash`: Your Splash password. + - Service `splunk`: The Splunk user's password. + - Service `sql_server`: The user's password. + - Service `sql_server_hva`: The user's password. + - Service `sql_server_rds`: The user's password. + - Service `sql_server_sap_ecc_hva`: The user's password. + - Service `starrez`: Your StarRez API password + - Service `stylight`: Your Stylight Password. + - Service `teamwork`: Your Teamwork password. + - Service `the_trade_desk`: The Trade Desk password. It is a part of the login credentials. + - Service `toggl_track`: Your Toggl Track Password + - Service `ukg_pro`: Your UKG Pro password. + - Service `unicommerce`: Your uniware login password. + - Service `upland`: Your Upland Software Password. + - Service `veevavault`: Your Veeva Vault password. + - Service `walmart_dsp`: Walmart DSP password. It is a part of the login credentials. + - Service `when_i_work`: Your When I Work password. + - Service `wherefour`: Your Wherefour password. + - Service `workday`: Workday password. + - Service `workday_financial_management`: Workday password. + - Service `workday_hcm`: Workday password. + - Service `xandr`: Your Xandr password. + - Service `younium`: Your Younium password. +- `pat` (String, Sensitive) Field usage depends on `service` value: + - Service `github`: The `Personal Access Token` generated in Github. +- `pat_name` (String) Field usage depends on `service` value: + - Service `tableau_source`: Your Tableau Source PAT Name. +- `pat_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `tableau_source`: Your Tableau Source PAT Secret. 
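+
+Since fields such as `password` and `pat` above are sensitive, one reasonable pattern (a sketch, not the only option) is to pass them in through sensitive Terraform variables rather than hard-coding them:
+
+```hcl
+variable "github_pat" {
+  type      = string
+  sensitive = true # keeps the token out of CLI output
+}
+
+resource "fivetran_connector" "github_example" {
+  group_id = "your_group_id" # placeholder
+  service  = "github"
+
+  config {
+    pat = var.github_pat # the Personal Access Token generated in GitHub
+  }
+}
+```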
+- `path` (String) Field usage depends on `service` value: + - Service `jira`: A URL subdirectory where the Jira instance is working. +- `pats` (Set of String, Sensitive) Field usage depends on `service` value: + - Service `github`: The `Personal Access Tokens`. +- `pattern` (String) Field usage depends on `service` value: + - Service `aws_cost_report`: Optional. All files in your search path matching this regular expression will be synced. This parameter is optional. + - Service `azure_blob_storage`: All files in your search path matching this regular expression will be synced. + - Service `box`: All files in your search path matching this regular expression will be synced. + - Service `dropbox`: All files in your search path matching this regular expression will be synced. + - Service `email`: All files in your search path matching this regular expression will be synced. + - Service `ftp`: All files in your search path matching this regular expression will be synced. + - Service `gcs`: All files in your search path matching this regular expression will be synced. + - Service `google_drive`: All files in your search path matching this regular expression will be synced. + - Service `kinesis`: Optional. All files in your search path matching this regular expression will be synced. This parameter is optional. + - Service `s3`: All files in your search path matching this regular expression will be synced. + - Service `sftp`: All files in your search path matching this regular expression will be synced. + - Service `share_point`: All files in your search path matching this regular expression will be synced. + - Service `wasabi_cloud_storage`: All files in your search path matching this regular expression will be synced. +- `pdb_name` (String) Field usage depends on `service` value: + - Service `oracle`: Optional: Required only for containerized database. + - Service `oracle_ebs`: Optional: Required only for containerized database. + - Service `oracle_hva`: (Multi-tenant databases only) The database's PDB name. Exclude this parameter for single-tenant databases. + - Service `oracle_rac`: Optional: Required only for containerized database. + - Service `oracle_rds`: Optional: Required only for containerized database. + - Service `oracle_sap_hva`: (Multi-tenant databases only) The database's PDB name. Exclude this parameter for single-tenant databases. +- `pem_certificate` (String, Sensitive) Field usage depends on `service` value: + - Service `apple_search_ads`: The contents of your PEM certificate file. Must be populated if `is_auth2_enabled` is set to `false`. +- `pem_private_key` (String, Sensitive) Field usage depends on `service` value: + - Service `apple_search_ads`: The contents of your PEM secret key file. Must be populated if `is_auth2_enabled` is set to `true`. +- `per_interaction_dimensions` (Set of String) Field usage depends on `service` value: + - Service `double_click_campaign_manager`: Per Interaction Dimensions. +- `personal_access_token` (String, Sensitive) Field usage depends on `service` value: + - Service `cj_commission_detail`: Your CJ Commission Detail personal access token. + - Service `databricks_db`: Access Token + - Service `harvest`: Your Harvest Personal Access Token. + - Service `productive`: Your Productive personal access token. + - Service `totango`: Your Totango personal access token. +- `personal_api_token` (String, Sensitive) Field usage depends on `service` value: + - Service `circleci`: Your CircleCI Personal API token. 
+ - Service `monday`: Your Monday.com Personal API Token.
+- `pgp_pass_phrase` (String, Sensitive) Field usage depends on `service` value:
+ - Service `azure_blob_storage`: The PGP passphrase used to create the key. Must be populated if `use_pgp_encryption_options` is set to `true`.
+ - Service `ftp`: The PGP passphrase used to create the key. Must be populated if `use_pgp_encryption_options` is set to `true`.
+ - Service `gcs`: The PGP passphrase used to create the key. Must be populated if `use_pgp_encryption_options` is set to `true`.
+ - Service `s3`: The PGP passphrase used to create the key. Must be populated if `use_pgp_encryption_options` is set to `true`.
+ - Service `sftp`: The PGP passphrase used to create the key. Must be populated if `use_pgp_encryption_options` is set to `true`.
+ - Service `wasabi_cloud_storage`: The PGP passphrase used to create the key. Must be populated if `use_pgp_encryption_options` is set to `true`.
+- `pgp_secret_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `azure_blob_storage`: The contents of your PGP secret key file. Must be populated if `use_pgp_encryption_options` is set to `true`.
+ - Service `ftp`: The contents of your PGP secret key file. Must be populated if `use_pgp_encryption_options` is set to `true`.
+ - Service `gcs`: The contents of your PGP secret key file. Must be populated if `use_pgp_encryption_options` is set to `true`.
+ - Service `s3`: The contents of your PGP secret key file. Must be populated if `use_pgp_encryption_options` is set to `true`.
+ - Service `sftp`: The contents of your PGP secret key file. Must be populated if `use_pgp_encryption_options` is set to `true`.
+ - Service `wasabi_cloud_storage`: The contents of your PGP secret key file. Must be populated if `use_pgp_encryption_options` is set to `true`.
+- `phone_number` (String) Field usage depends on `service` value:
+ - Service `itunes_connect`: Register the phone number on the Apple ID account page for 2FA.
+- `port` (Number) Field usage depends on `service` value:
+ - Service `aurora`: The port number.
+ - Service `aurora_postgres`: The port number.
+ - Service `azure_postgres`: The port number.
+ - Service `azure_sql_db`: The port number.
+ - Service `azure_sql_managed_db`: The port number.
+ - Service `clarity`: The port number.
+ - Service `cockroachdb`: The port number.
+ - Service `databricks_db`: The port of your SQL warehouse.
+ - Service `db2i_hva`: The port number.
+ - Service `db2i_sap_hva`: The port number.
+ - Service `documentdb`: Port of the primary node. Ignored if `hosts` value is provided.
+ - Service `dynamics_365_fo`: The port number.
+ - Service `ehr`: The port number.
+ - Service `elastic_cloud`: The port number.
+ - Service `es_self_hosted`: The port number.
+ - Service `ftp`: FTP port.
+ - Service `google_cloud_mysql`: The port number.
+ - Service `google_cloud_postgresql`: The port number.
+ - Service `google_cloud_sqlserver`: The port number.
+ - Service `hana_sap_hva_b1`: The port number.
+ - Service `hana_sap_hva_ecc`: The port number.
+ - Service `hana_sap_hva_ecc_netweaver`: The port number.
+ - Service `hana_sap_hva_s4`: The port number.
+ - Service `hana_sap_hva_s4_netweaver`: The port number.
+ - Service `heroku_postgres`: The port number.
+ - Service `jira`: The Jira service host port.
+ - Service `magento_mysql`: The port number.
+ - Service `magento_mysql_rds`: The port number.
+ - Service `maria`: The port number.
+ - Service `maria_azure`: The port number.
+ - Service `maria_rds`: The port number.
+ - Service `mysql`: The port number. + - Service `mysql_azure`: The port number. + - Service `mysql_rds`: The port number. + - Service `netsuite_suiteanalytics`: The NetSuite service host port. + - Service `opendistro`: The port number. + - Service `opensearch`: The port number. + - Service `oracle`: The port number. + - Service `oracle_ebs`: The port number. + - Service `oracle_hva`: The port number. + - Service `oracle_rac`: The port number. + - Service `oracle_rds`: The port number. + - Service `oracle_sap_hva`: The port number. + - Service `postgres`: The port number. + - Service `postgres_rds`: The port number. + - Service `redshift_db`: Port number + - Service `sap_hana`: The SAP HANA port number. + - Service `sap_s4hana`: The SAP S/4 port number. + - Service `sftp`: SFTP port. + - Service `snowflake_db`: The Snowflake optional port number. + - Service `splunk`: The Splunk service host port. + - Service `sql_server`: The port number. + - Service `sql_server_hva`: The port number. + - Service `sql_server_rds`: The port number. + - Service `sql_server_sap_ecc_hva`: The port number. +- `post_click_attribution_window_size` (String) Field usage depends on `service` value: + - Service `linkedin_ads`: The time period to attribute conversions based on clicks. Default value: `DAY_30` +- `prebuilt_report` (String) Field usage depends on `service` value: + - Service `facebook`: The name of report of which connector will sync the data. [Possible prebuilt_report values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#prebuiltreport). + - Service `google_analytics`: The name of the Prebuilt Report from which the connector will sync the data. +- `prefix` (String) Field usage depends on `service` value: + - Service `aws_cloudtrail`: If prefix is present when configuring the bucket. + - Service `aws_cost_report`: Folder path to the aws_cost_report files within the bucket. + - Service `aws_inventory`: The prefix if you used one when configuring the bucket. + - Service `azure_blob_storage`: All files and folders under this folder path will be searched for files to sync. + - Service `box`: All files and folders under this folder path will be searched for files to sync. + - Service `cloudfront`: The name of the CloudFront folder in the bucket. + - Service `dropbox`: All files and folders under this folder path will be searched for files to sync. + - Service `ftp`: All files and folders under this folder path will be searched for files to sync. + - Service `gcs`: All files and folders under this folder path will be searched for files to sync. + - Service `google_drive`: All files and folders under this folder path will be searched for files to sync, this can be the entire drive link or a folder URL + - Service `kinesis`: Folder path to the Kinesis files within the bucket. + - Service `marin`: Folder path to the Marin manifest file. + - Service `microsoft_dynamics_365_fo`: Folder name in which the exported dynamics 365 data resides. + - Service `s3`: All files and folders under this folder path will be searched for files to sync. + - Service `segment`: Folder path to the Segment files within the bucket. Must be populated if `sync_type` is set to `S3`. + - Service `sftp`: All files and folders under this folder path will be searched for files to sync. + - Service `share_point`: All files and folders under this folder path link will be searched for files to sync. This can be any shared folder link. 
+ - Service `wasabi_cloud_storage`: All files and folders under this folder path will be searched for files to sync. +- `primary_key` (String, Sensitive) Field usage depends on `service` value: + - Service `care_quality_commission`: Your Care Quality Commission primary key. +- `primary_keys` (Set of String) Field usage depends on `service` value: + - Service `workday`: Primary Keys +- `private_key` (String, Sensitive) Field usage depends on `service` value: + - Service `absorb_lms`: Your Absorb LMS REST API private key. + - Service `adobe_analytics`: The complete contents of your private key file including the start and end tags (`----BEGIN PRIVATE KEY----` to `----END PRIVATE KEY----`). + - Service `adp_workforce_now`: Private Key. + - Service `anaplan`: The contents of your private key file. Must be populated if `auth_mode` is set to `Certificate`. + - Service `apple_search_ads`: The contents of your secret key file. Must be populated if `is_auth2_enabled` is set to `false`. + - Service `braintree`: The contents of your secret key file. + - Service `braintree_sandbox`: The contents of your secret key file. + - Service `qualtrics`: Your private key + - Service `snowflake_db`: Private access key. The field should be specified if authentication type is `KEY_PAIR`. +- `private_token` (String, Sensitive) Field usage depends on `service` value: + - Service `eventbrite`: Your Eventbrite private token. +- `product` (String) Field usage depends on `service` value: + - Service `webconnex`: Your Webconnex product. +- `profiles` (Set of String) Field usage depends on `service` value: + - Service `amazon_ads`: Specific User Profile IDs to sync. Must be populated if `sync_mode` is set to `SpecificProfiles`. + - Service `google_analytics`: Specific User Profile IDs to sync. Must be populated if `syncMode` is set to `SpecificAccounts`. + - Service `google_analytics_mcf`: Specific User Profile IDs to sync. Must be populated if `sync_mode` is set to `SPECIFIC_ACCOUNTS`. +- `project_access_token` (String, Sensitive) Field usage depends on `service` value: + - Service `rollbar`: Your Rollbar project access token. +- `project_credentials` (Block Set) (see [below for nested schema](#nestedblock--config--project_credentials)) +- `project_id` (String, Sensitive) Field usage depends on `service` value: + - Service `bigquery_db`: BigQuery project ID + - Service `google_analytics_360`: The project ID. + - Service `google_analytics_4_export`: The Project ID. + - Service `mixpanel`: Project ID +- `project_key` (String) Field usage depends on `service` value: + - Service `commercetools`: Your commercetools project key. +- `projects` (Set of String) Field usage depends on `service` value: + - Service `asana`: Specific Project IDs to sync. Must be populated if `syncMode` is set to `SpecificProjects`. + - Service `jira`: Specific projects to sync. Must be populated if `syncMode` is set to `CUSTOM`. +- `properties` (Set of String) Field usage depends on `service` value: + - Service `google_analytics_4`: The array of strings in the `properties/{id}` format where `id` is a Google Analytics 4 property identifier. Must be populated if `sync_mode` is set to `SPECIFIC_ACCOUNTS`. +- `property_id` (String) Field usage depends on `service` value: + - Service `cloudbeds`: Your Cloudbeds Property IDs. 
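+
+As an illustration of the file-selection fields above, here is a hypothetical S3 sketch that restricts the sync to CSV files under one folder (placeholder bucket, folder path, and regular expression):
+
+```hcl
+resource "fivetran_connector" "s3_filtered_example" {
+  group_id = "your_group_id" # placeholder
+  service  = "s3"
+
+  config {
+    bucket  = "my-bucket"      # placeholder; documented elsewhere on this page
+    prefix  = "exports/daily/" # `prefix`: folder path searched for files to sync
+    pattern = ".*\\.csv$"      # `pattern`: only files matching this regex are synced
+  }
+}
+```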
+- `public_key` (String) Field usage depends on `service` value: + - Service `aurora`: Public Key + - Service `aurora_postgres`: Public Key + - Service `azure_blob_storage`: Public key generated by Fivetran to be copied into the host-machine's authorized keys file. + - Service `azure_cosmos_for_mongo`: Public Key + - Service `azure_postgres`: Public Key + - Service `azure_service_bus`: Public key generated by Fivetran to be copied into the host-machine's authorized keys file. + - Service `azure_sql_db`: Public Key. + - Service `azure_sql_managed_db`: Public Key. + - Service `braintree`: The contents of your PEM certificate file. + - Service `braintree_sandbox`: The contents of your PEM certificate file. + - Service `clarity`: Public Key. + - Service `cockroachdb`: Public Key + - Service `db2i_hva`: Public Key + - Service `db2i_sap_hva`: Public Key + - Service `documentdb`: Public Key + - Service `dynamics_365_fo`: Public Key. + - Service `ehr`: Public Key. + - Service `elastic_cloud`: Public Key + - Service `es_self_hosted`: Public Key + - Service `google_cloud_mysql`: Public Key + - Service `google_cloud_postgresql`: Public Key + - Service `google_cloud_sqlserver`: Public Key. + - Service `hana_sap_hva_b1`: Public Key + - Service `hana_sap_hva_ecc`: Public Key + - Service `hana_sap_hva_ecc_netweaver`: Public Key + - Service `hana_sap_hva_s4`: Public Key + - Service `hana_sap_hva_s4_netweaver`: Public Key + - Service `heroku_postgres`: Public Key + - Service `magento_mysql`: Public Key + - Service `magento_mysql_rds`: Public Key + - Service `maria`: Public Key + - Service `maria_azure`: Public Key + - Service `maria_rds`: Public Key + - Service `mongo`: Public Key + - Service `mongo_sharded`: Public Key + - Service `mysql`: Public Key + - Service `mysql_azure`: Public Key + - Service `mysql_rds`: Public Key + - Service `opendistro`: Public Key + - Service `opensearch`: Public Key + - Service `oracle`: Public Key + - Service `oracle_ebs`: Public Key + - Service `oracle_hva`: Public Key + - Service `oracle_rac`: Public Key + - Service `oracle_rds`: Public Key + - Service `oracle_sap_hva`: Public Key + - Service `partnerstack_vendor`: Your PartnerStack Vendor Public key. + - Service `postgres`: Public Key + - Service `postgres_rds`: Public Key + - Service `sap_hana`: Public Key + - Service `sap_s4hana`: Public Key + - Service `sftp`: Public Key + - Service `sql_server`: Public Key. + - Service `sql_server_hva`: Public Key. + - Service `sql_server_rds`: Public Key. + - Service `sql_server_sap_ecc_hva`: Public Key. +- `publication_name` (String) Field usage depends on `service` value: + - Service `aurora_postgres`: Publication name. Specify only for `"updated_method": "WAL_PGOUTPUT"`. + - Service `azure_postgres`: Publication name. Specify only for `"updated_method": "WAL_PGOUTPUT"`. + - Service `google_cloud_postgresql`: Publication name. Specify only for `"updated_method": "WAL_PGOUTPUT"`. + - Service `heroku_postgres`: Publication name. Specify only for `"updated_method": "WAL_PGOUTPUT"`. + - Service `postgres`: Publication name. Specify only for `"updated_method": "WAL_PGOUTPUT"`. + - Service `postgres_rds`: Publication name. Specify only for `"updated_method": "WAL_PGOUTPUT"`. +- `pull_archived_campaigns` (Boolean) Field usage depends on `service` value: + - Service `outbrain`: Include or ignore results from archived campaigns +- `query_id` (String) Field usage depends on `service` value: + - Service `google_display_and_video_360`: The ID of the query whose configuration you want to reuse. 
This is a required parameter when `config_method` is set to `REUSE_EXISTING`.
+- `query_param_value` (String, Sensitive) Field usage depends on `service` value:
+ - Service `alchemer`: Your Alchemer API key.
+ - Service `birdeye`: Your Birdeye query-param-value.
+- `quota_project_id` (String) Field usage depends on `service` value:
+ - Service `bigquery_db`: Specify a different project ID to account for quota and billing of Fivetran query workload
+- `refresh_token` (String, Sensitive) Field usage depends on `service` value:
+ - Service `ironsource`: Your Ironsource `Client Secret`.
+- `refresh_token_expires_at` (String) Field usage depends on `service` value:
+ - Service `pinterest_ads`: The expiration date of the refresh token. Unix timestamp in seconds
+- `region` (String) Field usage depends on `service` value:
+ - Service `algolia`: Your Algolia analytics region.
+ - Service `amazon_ads`: The region used by the Amazon Ads profile.
+ - Service `amazon_selling_partner`: The region used by the Amazon Selling Partner profile.
+ - Service `anaplan`: Your Anaplan account region
+ - Service `atlassian_ops_genie`: Your company's Opsgenie region (usually **company**.opsgenie.com)
+ - Service `awin`: Your Awin Region.
+ - Service `aws_lambda`: The AWS region code for the DynamoDB instance.
+ - Service `concur`: The region.
+ - Service `cvent`: Your Cvent region.
+ - Service `exact_online`: Your Exact Online region.
+ - Service `getfeedback`: Your GetFeedback region.
+ - Service `happyfox`: Your HappyFox region.
+ - Service `keypay`: Your KeyPay region.
+ - Service `medallia_agile_research`: Your Medallia Agile region.
+ - Service `messagebird`: Your MessageBird Account region.
+ - Service `mixpanel`: Data Region
+ - Service `navan`: Your Navan region.
+ - Service `on24`: Your ON24 region.
+ - Service `pendo`: The Pendo account region.
+ - Service `proofpoint_security_awareness`: Your Proofpoint Security Awareness Region.
+ - Service `ringover`: Your Ringover region.
+ - Service `samsara`: The region of your Samsara account. For instance, if your region is `EUROPE`, provide `eu.samsara` in the `Region` field. If your region is not in Europe, provide `samsara`.
+ - Service `snyk`: Your Snyk region.
+ - Service `talkdesk`: Your Talkdesk region (".com",".eu","ca.com")
+ - Service `totango`: Your Totango region.
+ - Service `vonage_contact_center`: Your Vonage Contact Center region.
+ - Service `wasabi_cloud_storage`: The Wasabi Cloud Storage bucket region. Required for connector creation. Default value: `US_EAST_1`.
+ - Service `workday_strategic_sourcing`: Your Workday Strategic Sourcing Region.
+ - Service `zoho_books`: Your Zoho Books application host region.
+ - Service `zoho_campaigns`: Your Zoho Campaigns application host region.
+ - Service `zoho_desk`: Your Zoho Desk domain.
+ - Service `zoho_inventory`: Your Zoho Inventory application host region.
+- `region_api_url` (String) Field usage depends on `service` value:
+ - Service `amazon_attribution`: Your Amazon Attribution API URL region.
+- `region_auth_url` (String) Field usage depends on `service` value:
+ - Service `amazon_attribution`: Your Amazon Attribution auth URL region.
+- `region_token_url` (String) Field usage depends on `service` value:
+ - Service `amazon_attribution`: Your Amazon Attribution token URL region.
+- `region_url` (String) Field usage depends on `service` value:
+ - Service `playvox_workforce_management`: Your Playvox Workforce Management Region URL.
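+
+Using the `samsara` note above as an example, the `region` field might be set like this (a hypothetical sketch; all values are placeholders):
+
+```hcl
+resource "fivetran_connector" "samsara_example" {
+  group_id = "your_group_id" # placeholder
+  service  = "samsara"
+
+  config {
+    region = "eu.samsara" # per the note above: `eu.samsara` for Europe, `samsara` otherwise
+  }
+}
+```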
+- `replica_id` (Number) Field usage depends on `service` value:
+ - Service `aurora`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group.
+ - Service `google_cloud_mysql`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group.
+ - Service `magento_mysql`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group.
+ - Service `magento_mysql_rds`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group.
+ - Service `maria`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group.
+ - Service `maria_azure`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group.
+ - Service `maria_rds`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group.
+ - Service `mysql`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group.
+ - Service `mysql_azure`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group.
+ - Service `mysql_rds`: Unique ID within the MySQL replica set. Must be an integer different from all other master and replica servers within the same group.
+- `replication_slot` (String) Field usage depends on `service` value:
+ - Service `aurora_postgres`: Replication slot name. Specify only for `"updated_method": "WAL"` or `"WAL_PGOUTPUT"`.
+ - Service `azure_postgres`: Replication slot name. Specify only for `"updated_method": "WAL"` or `"WAL_PGOUTPUT"`.
+ - Service `google_cloud_postgresql`: Replication slot name. Specify only for `"updated_method": "WAL"` or `"WAL_PGOUTPUT"`.
+ - Service `heroku_postgres`: Replication slot name. Specify only for `"updated_method": "WAL"` or `"WAL_PGOUTPUT"`.
+ - Service `postgres`: Replication slot name. Specify only for `"updated_method": "WAL"` or `"WAL_PGOUTPUT"`.
+ - Service `postgres_rds`: Replication slot name. Specify only for `"updated_method": "WAL"` or `"WAL_PGOUTPUT"`.
+- `report_configs` (Block Set) (see [below for nested schema](#nestedblock--config--report_configs))
+- `report_configuration_ids` (Set of String) Field usage depends on `service` value:
+ - Service `double_click_campaign_manager`: You can select only one Floodlight Configuration ID per account.
+- `report_format_type` (String) Field usage depends on `service` value:
+ - Service `workday`: Selects the report format, either JSON or CSV. By default, the report format is JSON.
+- `report_keys` (String) Field usage depends on `service` value:
+ - Service `rakutenadvertising`: Your Rakuten Advertising report keys.
+- `report_list` (Block Set) (see [below for nested schema](#nestedblock--config--report_list))
+- `report_suites` (Set of String)
+- `report_timezone` (String) Field usage depends on `service` value:
+ - Service `criteo`: Report Timezone
+- `report_type` (String) Field usage depends on `service` value:
+ - Service `adroll`: The report type you want. Default value: `ALL_ADS`.
+ - Service `double_click_campaign_manager`: Type of reporting data to sync. Default value: `STANDARD`.
+ - Service `google_display_and_video_360`: The type of the report to create. This is a required parameter when `config_method` is set to `CREATE_NEW`. + - Service `youtube_analytics`: The name of report of which connector will sync the data. +- `report_url` (String) Field usage depends on `service` value: + - Service `workday`: URL for a live custom report. +- `reports` (Block Set) (see [below for nested schema](#nestedblock--config--reports)) +- `reports_linkedin_ads` (Set of String) Field usage depends on `service` value: + - Service `linkedin_ads`: Specific analytics reports to sync. Must be populated if adAnalytics is set to 'SpecificReports'. +- `repositories` (Set of String) Field usage depends on `service` value: + - Service `github`: Specific Repository IDs to sync. Must be populated if `syncMode` is set to `SpecificRepositories`. +- `resource_token` (String, Sensitive) Field usage depends on `service` value: + - Service `cosmos`: A token that provides access to a specific Azure Cosmos DB resource. Required for the `RESOURCE_TOKEN` data access method. +- `resource_url` (String) Field usage depends on `service` value: + - Service `dynamics_365`: URL at which Dynamics 365 is accessed +- `rest_api_limit` (Number) Field usage depends on `service` value: + - Service `pardot`: The number of API calls that the connector should not exceed in a day. Default REST API call limit per day: 150,000. +- `rfc_library_path` (String) Field usage depends on `service` value: + - Service `hana_sap_hva_ecc_netweaver`: Directory path containing the SAP NetWeaver RFC SDK library files. + - Service `hana_sap_hva_s4_netweaver`: Directory path containing the SAP NetWeaver RFC SDK library files. + - Service `oracle_sap_hva_netweaver`: Directory path containing the SAP NetWeaver RFC SDK library files. +- `role` (String) Field usage depends on `service` value: + - Service `netsuite_suiteanalytics`: The NetSuite Role ID for connection. + - Service `snowflake_db`: Snowflake Connector role name +- `role_arn` (String, Sensitive) Field usage depends on `service` value: + - Service `appsflyer`: S3 Role ARN + - Service `aws_cloudtrail`: The Role ARN required for authentication. + - Service `aws_cost`: The Role ARN required for authentication. + - Service `aws_cost_report`: The Role ARN required for authentication. + - Service `aws_inventory`: The Role ARN required for authentication. + - Service `aws_lambda`: The Role ARN required for authentication. + - Service `aws_msk`: If `sasl_mechanism` is set to `IAM`, enter your Role ARN + - Service `cloudfront`: The Role ARN required for authentication. + - Service `dynamodb`: Role ARN + - Service `heap`: The Role ARN required for authentication. + - Service `kinesis`: The Role ARN required for authentication. + - Service `s3`: The Role ARN required for authentication. Required for connector creation when syncing using private bucket. + - Service `segment`: The Role ARN required for authentication. Must be populated if `sync_type` is set to `S3`. +- `rollback_window` (Number) Field usage depends on `service` value: + - Service `appsflyer`: Rollback window +- `rollback_window_size` (Number) Field usage depends on `service` value: + - Service `bingads`: A period of time in days during which a conversion is recorded. +- `s3_bucket` (String) Field usage depends on `service` value: + - Service `webhooks`: The S3 bucket name. Required if `bucket_service` is set to `S3`. 
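+
+The `publication_name` and `replication_slot` entries above both apply only when the update method is `WAL_PGOUTPUT`; a hypothetical PostgreSQL sketch (placeholder names; `updated_method`, `host`, and `port` are documented elsewhere on this page):
+
+```hcl
+resource "fivetran_connector" "postgres_wal_example" {
+  group_id = "your_group_id" # placeholder
+  service  = "postgres"
+
+  config {
+    host             = "db.internal.example.com"
+    port             = 5432
+    updated_method   = "WAL_PGOUTPUT" # logical replication via pgoutput
+    publication_name = "fivetran_pub" # placeholder publication created in the database
+    replication_slot = "fivetran_slot" # placeholder replication slot
+  }
+}
+```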
+- `s3_export_bucket` (String) Field usage depends on `service` value:
+ - Service `braze`: Your S3 user export bucket. Required if `AWS_S3` is the `export_storage_type`.
+- `s3_export_external_id` (String) Field usage depends on `service` value:
+ - Service `braze`: This is the same as your `group_id`, used if `export_storage_type` is `AWS_S3`.
+- `s3_export_folder` (String) Field usage depends on `service` value:
+ - Service `braze`: Your S3 user export folder name. Required if `AWS_S3` is the `export_storage_type`.
+- `s3_export_role_arn` (String, Sensitive) Field usage depends on `service` value:
+ - Service `braze`: The Role ARN required for authentication. Required if `AWS_S3` is the `export_storage_type`.
+- `s3_role_arn` (String, Sensitive) Field usage depends on `service` value:
+ - Service `adjust`: The Role ARN required for authentication. Used if the `export_storage_type` is `AWS_S3`.
+ - Service `webhooks`: The Role ARN required for authentication. Required if `bucket_service` is set to `S3`.
+- `s3bucket` (String) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: The S3 bucket name.
+ - Service `braze`: Your S3 bucket. Required if `AWS_S3` is the `cloud_storage_type`.
+ - Service `sailthru`: Name of the bucket configured to receive Sailthru Connect data.
+- `s3external_id` (String) Field usage depends on `service` value:
+ - Service `braze`: This is the same as your `group_id`; it is used for authentication along with the `role_arn`. Required if `AWS_S3` is the `cloud_storage_type`.
+ - Service `sailthru`: The external ID is a string that designates who can assume the role.
+- `s3folder` (String) Field usage depends on `service` value:
+ - Service `braze`: Your S3 folder name. Required if `AWS_S3` is the `cloud_storage_type`.
+- `s3path` (String) Field usage depends on `service` value:
+ - Service `sailthru`: Copy and use this to configure Sailthru Connect in your Sailthru account.
+- `s3role_arn` (String, Sensitive) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: The Role ARN required for authentication.
+ - Service `braze`: The Role ARN required for authentication. Required if `AWS_S3` is the `cloud_storage_type`.
+ - Service `sailthru`: Role ARN of the IAM role created for Fivetran.
+- `sales_account_sync_mode` (String) Field usage depends on `service` value:
+ - Service `itunes_connect`: Whether to sync all sales accounts or specific sales accounts.
+- `sales_accounts` (Set of String) Field usage depends on `service` value:
+ - Service `itunes_connect`: Specific sales account to sync. Must be populated if `sales_account_sync_mode` is set to `SpecificSalesAccounts`.
+- `salesforce_security_token` (String, Sensitive) Field usage depends on `service` value:
+ - Service `pardot`: The Pardot user's Salesforce SSO Account Security Token.
+- `sandbox_account` (String) Field usage depends on `service` value:
+ - Service `gocardless`: Your GoCardless account type.
+- `sap_schema` (String) Field usage depends on `service` value:
+ - Service `db2i_hva`: The SAP schema.
+ - Service `db2i_sap_hva`: SAP schema name.
+ - Service `sql_server_sap_ecc_hva`: SAP Schema Name.
+- `sap_source_schema` (String) Field usage depends on `service` value:
+ - Service `hana_sap_hva_b1`: The schema name where the HANA tables reside.
+ - Service `hana_sap_hva_ecc`: The Hana schema name where the SAP tables reside.
+ - Service `hana_sap_hva_ecc_netweaver`: The Hana schema name where the SAP tables reside.
+ - Service `hana_sap_hva_s4`: The Hana schema name where the SAP tables reside.
+ - Service `hana_sap_hva_s4_netweaver`: The Hana schema name where the SAP tables reside.
+- `sap_user` (String) Field usage depends on `service` value:
+ - Service `oracle_sap_hva`: The Oracle schema name where the SAP tables reside.
+- `sasl_mechanism` (String) Field usage depends on `service` value:
+ - Service `apache_kafka`: SASL Mechanism
+ - Service `aws_msk`: If `security_protocol` is set to `SASL`, enter the SASL Mechanism
+- `sasl_plain_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `apache_kafka`: API Key
+- `sasl_plain_secret` (String, Sensitive) Field usage depends on `service` value:
+ - Service `apache_kafka`: API Secret
+- `sasl_scram256_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `apache_kafka`: API Key
+- `sasl_scram256_secret` (String, Sensitive) Field usage depends on `service` value:
+ - Service `apache_kafka`: API Secret
+- `sasl_scram512_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `apache_kafka`: API Key
+ - Service `aws_msk`: If `sasl_mechanism` is set to `SCRAM_SHA_512`, enter your secret's `saslScram512Key`.
+- `sasl_scram512_secret` (String, Sensitive) Field usage depends on `service` value:
+ - Service `apache_kafka`: API Secret
+ - Service `aws_msk`: If `sasl_mechanism` is set to `SCRAM_SHA_512`, enter your secret's `saslScram512Secret`.
+- `schema_registry_credentials_source` (String) Field usage depends on `service` value:
+ - Service `apache_kafka`: Schema Registry Credentials source
+ - Service `aws_msk`: Schema Registry Credentials source
+ - Service `confluent_cloud`: Schema Registry Credentials source
+- `schema_registry_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `apache_kafka`: Schema Registry Key
+ - Service `aws_msk`: Schema Registry Key
+ - Service `azure_service_bus`: The key used to access the schema registry. Required for the `avro` and `protobuf` message types
+ - Service `confluent_cloud`: Schema Registry Key
+- `schema_registry_secret` (String, Sensitive) Field usage depends on `service` value:
+ - Service `apache_kafka`: Schema Registry Secret
+ - Service `aws_msk`: Schema Registry Secret
+ - Service `azure_service_bus`: The secret used to access the schema registry. Required for the `avro` and `protobuf` message types
+ - Service `confluent_cloud`: Schema Registry Secret
+- `schema_registry_urls` (Set of String) Field usage depends on `service` value:
+ - Service `apache_kafka`: Your schema registry URLs
+ - Service `aws_msk`: Your schema registry URLs
+ - Service `azure_service_bus`: The comma-separated list of schema registry servers in the `server:port` format
+ - Service `confluent_cloud`: Your schema registry URLs
+- `scope` (String) Field usage depends on `service` value:
+ - Service `ebay`: Your eBay scopes.
+- `seats` (Set of String) Field usage depends on `service` value:
+ - Service `yahoo_dsp`: Specific Seats to sync. Must be populated if `sync_mode_seat` is set to `SPECIFIC_SEATS`.
+- `secret` (String, Sensitive) Field usage depends on `service` value:
+ - Service `appcues`: Your Appcues Secret.
+ - Service `buildium`: Your Buildium API secret.
+ - Service `loopio`: Your Loopio Secret.
+ - Service `mode`: Your Mode Secret.
+ - Service `playvox_workforce_management`: Your Playvox Workforce Management Secret.
+ - Service `twilio`: The Twilio API secret
+ - Service `uservoice`: The UserVoice API secret.
+ - Service `vts`: Your VTS secret. +- `secret_key` (String, Sensitive) Field usage depends on `service` value: + - Service `afterpay`: Your Afterpay Secret Key. + - Service `appsflyer`: Your AWS secret key. + - Service `bigquery_db`: Private key of the customer service account. If specified, your service account will be used to process the data instead of the Fivetran-managed service account. + - Service `checkr`: Your Checkr secret key. + - Service `ezofficeinventory`: Your EZOfficeInventory API secret key. + - Service `gcs`: Your JSON Private Key. Used to authorize service account. Required if you use a Custom Service Account to authenticate the storage bucket. + - Service `ironsource`: Your Ironsource `Client ID`. + - Service `partnerstack_vendor`: Your PartnerStack Vendor Secret key. + - Service `paypal`: `Client Secret` of your PayPal client application. + - Service `paypal_sandbox`: `Client Secret` of your PayPal client application. + - Service `retailnext`: Your RetailNext secret key. + - Service `statsig`: Your Statsig secret key. + - Service `yotpo`: Your Yotpo Secret key +- `secrets` (String, Sensitive) Field usage depends on `service` value: + - Service `aws_lambda`: The secrets that should be passed to the function at runtime. + - Service `azure_function`: The secrets that should be passed to the function at runtime. + - Service `google_cloud_function`: The secrets that should be passed to the function at runtime. +- `secrets_list` (Block Set) (see [below for nested schema](#nestedblock--config--secrets_list)) +- `security_protocol` (String) Field usage depends on `service` value: + - Service `apache_kafka`: Security protocol for Kafka interaction. + - Service `aws_msk`: The security protocol for Kafka interaction. + - Service `confluent_cloud`: Security protocol for Confluent Cloud interaction. + - Service `heroku_kafka`: Security protocol for Heroku Kafka interaction. +- `segments` (Set of String) Field usage depends on `service` value: + - Service `google_analytics`: A segment is a subset of your Analytics data that is made up of one or more non-destructive filters (filters that do not alter the underlying data). Those filters isolate subsets of users, sessions, and hits. +- `selected_event_types` (Set of String) Field usage depends on `service` value: + - Service `salesforce_marketing_cloud`: Select the event types to be synced. +- `selected_exports` (Set of String) Field usage depends on `service` value: + - Service `anaplan`: The list of export IDs in the format `workspace_id_model_id_export_id` that the connector will sync. Must be populated if `sync_mode` is set to `SpecificExports`. +- `sender_id` (String) Field usage depends on `service` value: + - Service `sage_intacct`: Your Sender ID +- `sender_password` (String, Sensitive) Field usage depends on `service` value: + - Service `sage_intacct`: Your Sender Password +- `server` (String) Field usage depends on `service` value: + - Service `castor_edc`: Your Castor EDC Server. +- `server_address` (String) Field usage depends on `service` value: + - Service `tableau_source`: Your Tableau Source server address. +- `server_url` (String) Field usage depends on `service` value: + - Service `oracle_business_intelligence_publisher`: The Oracle Business Intelligence Instance URL. + - Service `oracle_fusion_cloud_apps_crm`: The Oracle Fusion Cloud Instance URL. + - Service `oracle_fusion_cloud_apps_fscm`: The Oracle Fusion Cloud Instance URL. + - Service `oracle_fusion_cloud_apps_hcm`: The Oracle Fusion Cloud Instance URL. 
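+
+By way of illustration only, the sensitive `secrets` string for a function connector might be populated as sketched below. This assumes the standard `fivetran_connector` arguments; the values are placeholders and the JSON shape of the secrets string is an assumption, not taken from the reference above:
+
+```hcl
+resource "fivetran_connector" "lambda" {
+  group_id = "my_group_id" # placeholder group ID
+  service  = "aws_lambda"
+
+  destination_schema {
+    name = "lambda_events" # placeholder schema name
+  }
+
+  config {
+    role_arn = "arn:aws:iam::123456789012:role/fivetran-lambda" # placeholder Role ARN for authentication
+    # Secrets passed to the function at runtime; the JSON shape here is an assumption
+    secrets = jsonencode({ API_TOKEN = "placeholder-token" })
+  }
+}
+```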
+- `server_variable` (String) Field usage depends on `service` value:
+ - Service `myosh`: Your myosh server variable.
+- `servers` (Set of String) Field usage depends on `service` value:
+ - Service `apache_kafka`: Comma-separated list of Kafka servers in the format `server:port`.
+ - Service `aws_msk`: Comma-separated list of Kafka servers in the `server:port` format.
+ - Service `confluent_cloud`: Comma-separated list of Confluent Cloud servers in the format `server:port`.
+ - Service `heroku_kafka`: Comma-separated list of Heroku Kafka servers in the format `server:port`.
+- `service_account` (String) Field usage depends on `service` value:
+ - Service `google_drive`: Share the folder with the email address
+- `service_account_email` (String) Field usage depends on `service` value:
+ - Service `google_cloud_function`: Provide Invoker role to this service account.
+- `service_account_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `firebase`: The contents of your service account key file. Required for authentication.
+- `service_account_secret` (String, Sensitive) Field usage depends on `service` value:
+ - Service `mixpanel`: Service Account Secret
+- `service_account_username` (String) Field usage depends on `service` value:
+ - Service `mixpanel`: Service Account Username
+- `service_authentication` (String, Sensitive) Field usage depends on `service` value:
+ - Service `dsv`: A base64 encoded variant of your `username:password` string. Required for authentication.
+- `service_name` (String) Field usage depends on `service` value:
+ - Service `hana_sap_hva_ecc_netweaver`: Unique identifier `sapsid` of the SAP system. This field is displayed only when the REMOTE SERVICE IDENTIFICATION is set to Service.
+ - Service `hana_sap_hva_s4_netweaver`: Unique identifier `sapsid` of the SAP system. This field is displayed only when the REMOTE SERVICE IDENTIFICATION is set to Service.
+ - Service `walmart_marketplace`: Your Walmart Marketplace service name.
+- `sftp_host` (String) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: SFTP host.
+ - Service `salesforce_marketing_cloud`: Host
+- `sftp_is_key_pair` (Boolean) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: Log in with key pair or password
+ - Service `salesforce_marketing_cloud`: Set this field if you use a key pair for logging into your SFTP server. Don't set it if you use a username and password (see the example below).
+- `sftp_password` (String, Sensitive) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: SFTP password. Required if `sftp_is_key_pair` is `false`.
+ - Service `salesforce_marketing_cloud`: Password
+- `sftp_port` (Number) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: SFTP port.
+ - Service `salesforce_marketing_cloud`: Port
+- `sftp_public_key` (String) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: Public Key
+ - Service `salesforce_marketing_cloud`: Public Key
+- `sftp_user` (String) Field usage depends on `service` value:
+ - Service `adobe_analytics_data_feed`: SFTP user.
+ - Service `salesforce_marketing_cloud`: User
+- `share_url` (String) Field usage depends on `service` value:
+ - Service `share_point`: Your SharePoint folder URL. You can find the folder URL by following the steps mentioned [here](https://fivetran.com/docs/connectors/files/share-point/setup-guide).
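+
+A minimal, illustrative sketch of the SFTP fields above for a Salesforce Marketing Cloud connector using password login. All values are placeholders and other required fields are omitted; the resource shape assumes the standard `fivetran_connector` arguments:
+
+```hcl
+resource "fivetran_connector" "sfmc" {
+  group_id = "my_group_id" # placeholder group ID
+  service  = "salesforce_marketing_cloud"
+
+  destination_schema {
+    name = "sfmc" # placeholder schema name
+  }
+
+  config {
+    # other required fields omitted for brevity
+    sftp_host        = "sftp.example.com" # placeholder host
+    sftp_port        = 22
+    sftp_user        = "fivetran_user"    # placeholder user
+    sftp_is_key_pair = false              # log in with username/password
+    sftp_password    = "placeholder"      # required because sftp_is_key_pair is false
+  }
+}
+```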
+- `sheet_id` (String) Field usage depends on `service` value: + - Service `google_sheets`: The URL of the sheet that can be copied from the browser address bar, or the ID of the sheet that can be found in the sheet's URL between **/d/** and **/edit**. +- `shop` (String) Field usage depends on `service` value: + - Service `shopify`: The Shopify shop name. Can be found in the URL before **.myshopify.com**. +- `short_code` (String, Sensitive) Field usage depends on `service` value: + - Service `salesforce_commerce_cloud`: The Salesforce eight-character string assigned to a realm for routing purposes. +- `should_sync_events_with_deleted_profiles` (Boolean) Field usage depends on `service` value: + - Service `klaviyo`: Sync events linked to deleted profiles +- `show_records_with_no_metrics` (Boolean) Field usage depends on `service` value: + - Service `apple_search_ads`: Turn the toggle on if you want the reports to also return records without metrics. +- `sid` (String) Field usage depends on `service` value: + - Service `twilio`: The Twilio API key SID +- `signer_public_key` (String) Field usage depends on `service` value: + - Service `azure_blob_storage`: The contents of the signer's public key file. Must be populated if `use_pgp_encryption_options` is set to `true` and PGP encrypted files are signed. + - Service `ftp`: The contents of the signer's public key file. Must be populated if `use_pgp_encryption_options` is set to `true` and PGP encrypted files are signed. + - Service `gcs`: The contents of the signer's public key file. Must be populated if `use_pgp_encryption_options` is set to `true` and PGP encrypted files are signed. + - Service `s3`: The contents of the signer's public key file. Must be populated if `use_pgp_encryption_options` is set to `true` and PGP encrypted files are signed. + - Service `sftp`: The contents of the signer's public key file. Must be populated if `use_pgp_encryption_options` is set to `true` and PGP encrypted files are signed. + - Service `wasabi_cloud_storage`: The contents of the signer's public key file. Must be populated if `use_pgp_encryption_options` is set to `true` and PGP encrypted files are signed. +- `site_address` (String) Field usage depends on `service` value: + - Service `teamwork`: Your Teamwork site address. +- `site_id` (String) Field usage depends on `service` value: + - Service `microsoft_lists`: The Site ID of the SharePoint site from which you want to sync your lists. The Site ID is the `id` field in the [Graph API](https://docs.microsoft.com/en-us/graph/api/site-search?view=graph-rest-1.0&tabs=http) response for sites. + - Service `salesforce_commerce_cloud`: The name of the site from which you want to sync data. +- `site_name` (String) Field usage depends on `service` value: + - Service `microsoft_lists`: The Name of the SharePoint site. The Site Name is the `name` field in the Graph API response for sites. + - Service `tableau_source`: Your Tableau Source site name. +- `site_urls` (Set of String) Field usage depends on `service` value: + - Service `google_search_console`: Specific Site URLs to sync. Must be populated if `sync_mode` is set to `SpecificSites`. +- `skip_after` (Number) Field usage depends on `service` value: + - Service `aws_cost_report`: Enter 1 or greater + - Service `azure_blob_storage`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. 
+ - Service `box`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. + - Service `dropbox`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. + - Service `email`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. + - Service `ftp`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. + - Service `gcs`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. + - Service `google_drive`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. + - Service `kinesis`: Enter 1 or greater + - Service `s3`: We will skip over the number of lines specified at the end to avoid introducing aberrant data into your destination. + - Service `sftp`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. + - Service `share_point`: We will skip over the number of lines specified at the end so as to not introduce aberrant data into your destination. + - Service `wasabi_cloud_storage`: We will skip over the number of lines specified at the end to avoid introducing aberrant data into your destination. +- `skip_before` (Number) Field usage depends on `service` value: + - Service `aws_cost_report`: Enter 1 or greater + - Service `azure_blob_storage`: We will skip over the number of lines specified before syncing data. + - Service `box`: We will skip over the number of lines specified before syncing data. + - Service `dropbox`: We will skip over the number of lines specified before syncing data. + - Service `email`: We will skip over the number of lines specified before syncing data. + - Service `ftp`: We will skip over the number of lines specified before syncing data. + - Service `gcs`: We will skip over the number of lines specified before syncing data. + - Service `google_drive`: We will skip over the number of lines specified before syncing data. + - Service `kinesis`: Enter 1 or greater + - Service `s3`: We will skip over the number of lines specified before syncing data. + - Service `sftp`: We will skip over the number of lines specified before syncing data. + - Service `share_point`: We will skip over the number of lines specified before syncing data. + - Service `wasabi_cloud_storage`: We will skip over the number of lines specified before syncing data. +- `skip_empty_reports` (Boolean) Field usage depends on `service` value: + - Service `google_ads`: Toggles the ["Skip empty reports"](https://fivetran.com/docs/connectors/applications/google-ads#skipemptyreports) feature. Enabled by default +- `snc_certificate` (String, Sensitive) +- `snc_certificate_source` (String, Sensitive) +- `snc_fivetran_name` (String) +- `snc_library_path` (String) Field usage depends on `service` value: + - Service `hana_sap_hva_ecc_netweaver`: Path to the external security product's library. + - Service `hana_sap_hva_s4_netweaver`: Path to the external security product's library. +- `snc_mode` (String) +- `snc_my_name` (String) +- `snc_name` (String) Field usage depends on `service` value: + - Service `hana_sap_hva_ecc_netweaver`: Client SNC name. + - Service `hana_sap_hva_s4_netweaver`: Client SNC name. 
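+
+To illustrate the `skip_before`/`skip_after` fields above, here is a hypothetical sketch for a file-based connector. It assumes the standard `fivetran_connector` arguments; the other required S3 fields are omitted and all values are placeholders:
+
+```hcl
+resource "fivetran_connector" "csv_files" {
+  group_id = "my_group_id" # placeholder group ID
+  service  = "s3"
+
+  destination_schema {
+    name = "csv_files" # placeholder schema name
+  }
+
+  config {
+    # other required S3 fields omitted for brevity
+    skip_before = 1 # skip one line at the start of each file before syncing data
+    skip_after  = 2 # skip two trailing lines to keep aberrant data out of the destination
+  }
+}
+```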
+- `snc_partner_name` (String) Field usage depends on `service` value: + - Service `hana_sap_hva_ecc_netweaver`: Communication partner's SNC name. + - Service `hana_sap_hva_s4_netweaver`: Communication partner's SNC name. +- `snc_source_name` (String) +- `soap_uri` (String) Field usage depends on `service` value: + - Service `marketo`: Marketo SOAP API Endpoint. +- `social_data_sync_timeframe` (String) Field usage depends on `service` value: + - Service `linkedin_company_pages`: The social data (UGCPosts, Shares, Comments) sync time frame in months. Default value: `SIX` . +- `source` (String) Field usage depends on `service` value: + - Service `adobe_analytics_data_feed`: The data source. +- `store_hash` (String) Field usage depends on `service` value: + - Service `big_commerce`: The BigCommerce store hash. +- `store_id` (String) Field usage depends on `service` value: + - Service `reviewsio`: Your REVIEWS.io store ID +- `sub_domain` (String) Field usage depends on `service` value: + - Service `absorb_lms`: Your Absorb LMS subdomain. + - Service `activecampaign`: Your ActiveCampaign sub-domain. + - Service `acumatica`: Your Acumatica subdomain. + - Service `ada`: Your Ada sub-domain. + - Service `alchemer`: Your Alchemer sub-domain. + - Service `atlassian_jira_align`: Your Jira Align base URL. + - Service `azure_boards`: Your Azure Boards Organization Name. + - Service `azure_devops`: Your Azure Organization Name + - Service `betterworks`: Your Betterworks subdomain. + - Service `bubble`: Your Bubble subdomain. + - Service `buildium`: Your Buildium subdomain. + - Service `canvas_by_instructure`: Your Canvas by Instructure domain. + - Service `chargebee_product_catalog_1`: Your Chargebee Product Catalog 1 subdomain. + - Service `chargebee_product_catalog_2`: Your Chargebee subdomain. + - Service `checkr`: Your Checkr subdomain. + - Service `clubspeed`: Your Clubspeed subdomain. + - Service `collibra`: Your collibra subdomain. + - Service `concord`: Your Concord Sub Domain. + - Service `contrast_security`: Your Contrast Security subdomain. + - Service `customerio`: Your Customer.io region-specific Subdomain. + - Service `dbt_cloud`: Your dbt Cloud API server region. + - Service `deputy`: Your Deputy subdomain. + - Service `docebo`: Your Docebo subdomain. + - Service `drata`: Your Drata sub_domain. + - Service `eventsforce`: Your Eventsforce subdomain. + - Service `ezofficeinventory`: Your EZOfficeInventory Subdomain. + - Service `fountain`: Your Fountain subdomain. + - Service `freshchat`: Your Freshchat Sub Domain + - Service `gainsight_customer_success`: The subdomain of your Gainsight account. + - Service `gainsight_product_experience`: Your Gainsight Product Experience subdomain. + - Service `genesys`: Your Genesys subdomain. + - Service `green_power_monitor`: Your GreenPowerMonitor subdomain. + - Service `infobip`: Your Infobip sub_domain. + - Service `insightly`: Your company's Insightly subdomain name. + - Service `instructure`: The Sub domain in which your Instructure account is hosted. + - Service `jamf`: Your Jamf subdomain. + - Service `kandji`: Your Kandji Subdomain. + - Service `khoros_care`: Your Khoros Care subDomain. + - Service `looker_source`: Your looker SubDomain name. + - Service `mailgun`: Your Mailgun subdomain. + - Service `maxio_chargify`: Enter Your Subdomain. + - Service `myosh`: Your myosh subdomain. + - Service `namely`: Your Namely subdomain. + - Service `nylas`: Your Nylas subdomain. + - Service `okta`: Your Okta subdomain. 
+ - Service `picqer`: Your Picqer subdomain. + - Service `pinpoint`: Your Pinpoint sub domain name. + - Service `piwik_pro`: Your Piwik PRO subdomain. + - Service `playvox`: Your Playvox Subdomain. + - Service `posthog`: Your PostHog data region (`app` or `eu`). + - Service `recurly`: Your company's Recurly subdomain. + - Service `reltio`: Your Reltio subdomain. + - Service `revel`: Your Revel Systems subDomain. + - Service `rundeck`: Your Rundeck subdomain. + - Service `sage_hr`: Your Sage HR subdomain. + - Service `salesforce_marketing_cloud`: Your Salesforce Marketing Cloud subdomain. + - Service `salsify`: Your Salsify Organization ID. + - Service `sap_success_factors`: Your SAP SuccessFactors Subdomain. + - Service `sonarqube`: Your Sonarqube subdomain. + - Service `starrez`: Your StarRez subdomain + - Service `tableau_source`: Your Tableau Source subdomain. + - Service `tempo`: Your Tempo subdomain. + - Service `testrail`: Your TestRail subdomain. + - Service `thinkific`: Your Thinkific subdomain. + - Service `totango`: Your Totango Subdomain. + - Service `tymeshift`: Your Tymeshift subdomain. + - Service `upland`: Your Upland Software subDomain. + - Service `wordpress`: Your WordPress subdomain. + - Service `workable`: Your Workable Subdomain. + - Service `wrike`: Your Wrike Subdomain. +- `subdomain` (String) Field usage depends on `service` value: + - Service `bamboohr`: The subdomain used to access your account. If you access BambooHR at 'https://mycompany.bamboohr.com', then the subdomain is 'mycompany'. + - Service `datadog`: Your Datadog subdomain. + - Service `ebay`: Your eBay environment. + - Service `freshdesk`: Your company's freshdesk subdomain (usually **company**.freshdesk.com). + - Service `freshsales`: Your Freshsales domain. + - Service `freshservice`: Your company's freshservice subdomain (usually **company**.freshservice.com). + - Service `freshsuccess`: Your Freshsuccess subdomain. + - Service `gorgias`: Your Gorgias subdomain. + - Service `jama_software`: Your Jama Software subdomain. + - Service `klarna`: Your Klarna subdomain. + - Service `learnupon`: Your Learnupon subdomain. + - Service `maxio_saasoptics`: Your Maxio SaaSOptics subdomain. + - Service `medallia`: Medallia subdomain + - Service `skillstx`: Your SkillsTX subdomain. + - Service `smarthr`: Your SmartHR subdomain. + - Service `sonarqube`: Your Sonarqube subdomain. + - Service `toast`: Your Toast domain. + - Service `vts`: Your VTS Subdomain. + - Service `zendesk_chat`: Your Zendesk domain. +- `subscription` (String) Field usage depends on `service` value: + - Service `retailnext`: Your RetailNext subscription. +- `subscription_key` (String, Sensitive) Field usage depends on `service` value: + - Service `dsv`: Your DSV subscription key. +- `support_connected_accounts_sync` (Boolean) Field usage depends on `service` value: + - Service `stripe`: Sync Connected Accounts. Connected Account Documentation - https://stripe.com/docs/api/connected_accounts. + - Service `stripe_test`: Sync Connected Accounts. Connected Account Documentation - https://stripe.com/docs/api/connected_accounts. +- `support_nested_columns` (Boolean) Field usage depends on `service` value: + - Service `workday`: This option is to unpack the nested columns and sync them separately. By default, we sync the nested columns as JSON objects. +- `survey_ids` (String) Field usage depends on `service` value: + - Service `qualaroo`: Array of Qualaroo Survey IDs. 
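+
+As a quick illustration of the `sub_domain` field above (a sketch assuming the standard `fivetran_connector` arguments; all values are placeholders):
+
+```hcl
+resource "fivetran_connector" "recurly" {
+  group_id = "my_group_id" # placeholder group ID
+  service  = "recurly"
+
+  destination_schema {
+    name = "recurly" # placeholder schema name
+  }
+
+  config {
+    sub_domain = "mycompany" # the company's Recurly subdomain (placeholder)
+  }
+}
+```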
+- `swipe_attribution_window` (String) Field usage depends on `service` value:
+ - Service `snapchat_ads`: The time period to attribute conversions based on swipes. Default value: `DAY_28`.
+- `sync_data_locker` (Boolean) Field usage depends on `service` value:
+ - Service `appsflyer`: Sync AppsFlyer Data Locker. Default value is `true`, set it to `false` to sync AppsFlyer data using only webhooks.
+- `sync_format` (String) Field usage depends on `service` value:
+ - Service `webhooks`: The webhooks sync format. Default value: `Unpacked`. Unpacked messages must be valid JSON.
+- `sync_formula_fields` (Boolean) Field usage depends on `service` value:
+ - Service `salesforce`: (Optional) Whether to sync formula fields. The value must be `true` or `false`.
+ - Service `salesforce_sandbox`: (Optional) Sync formula fields (default value = `false`).
+- `sync_metadata` (Boolean) Field usage depends on `service` value:
+ - Service `facebook_ads`: Parameter defining whether to enable or disable metadata synchronization. Default value: `TRUE`.
+- `sync_method` (String) Field usage depends on `service` value:
+ - Service `aws_lambda`: Sync Method
+- `sync_mode` (String) Field usage depends on `service` value:
+ - Service `adroll`: Whether to sync all advertisables or specific advertisables. Default value: `AllAdvertisables`.
+ - Service `amazon_ads`: Whether the connector should sync all profiles or specific profiles.
+ - Service `anaplan`: Whether to sync all exports or specific exports.
+ - Service `apple_search_ads`: Sync Mode
+ - Service `asana`: Whether to sync all projects or specific projects.
+ - Service `bingads`: Whether to sync all accounts or specific accounts. Default value: `AllAccounts`.
+ - Service `double_click_campaign_manager`: Whether to sync all user profiles or specific ones. Default value: `AllAccounts`.
+ - Service `dynamodb`: Whether to sync all tables in unpacked mode only or specific tables in packed mode. Default value: `UseUnpackedModeOnly`.
+ - Service `facebook`: Whether the connector should sync all accounts or specific accounts. [Possible sync_mode values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#syncmode).
+ - Service `facebook_ad_account`: Whether to sync all accounts or specific accounts. Default value: `AllAccounts`.
+ - Service `facebook_ads`: Whether the connector should sync all accounts or specific accounts. [Possible sync_mode values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#syncmode).
+ - Service `facebook_pages`: Whether to sync all accounts or specific accounts. Default value: `AllPages`.
+ - Service `github`: Whether to sync all repositories or specific repositories.
+ - Service `google_ads`: Whether to sync all accounts or specific accounts.
+ - Service `google_analytics`: Whether to sync all accounts or specific accounts.
+ - Service `google_analytics_4`: Whether to sync all accounts or specific accounts.
+ - Service `google_analytics_mcf`: Whether to sync all accounts or specific accounts. Default value: `ALL_ACCOUNTS`
+ - Service `google_search_console`: Whether to sync all sites or specific sites.
+ - Service `instagram_business`: Whether to sync all accounts or specific accounts. Default value: `AllAccounts`.
+ - Service `iterable`: Events Sync Mode.
+ - Service `jira`: Whether to sync all projects or specific projects.
+ - Service `linkedin_ads`: Whether to sync all accounts or specific accounts. Default value: `AllAccounts`
+ - Service `pendo`: Whether to sync all App IDs or specific App IDs. Default value: `AllAppIds`.
+ - Service `pinterest_ads`: Whether to sync all advertisers or specific advertisers.
+ - Service `reddit_ads`: Whether to sync all accounts or specific accounts.
+ - Service `salesforce_marketing_cloud`: Select the sync mode.
+ - Service `snapchat_ads`: Whether to sync all organizations or specific organizations. Default value: `AllOrganizations`.
+ - Service `spotify_ads`: Whether to sync all ad accounts or specific ad accounts.
+ - Service `taboola`: Whether to sync all accounts or specific accounts.
+ - Service `the_trade_desk`: Whether to sync all accounts or specific accounts.
+ - Service `tiktok_ads`: Whether to sync all advertiser accounts or specific accounts.
+ - Service `twilio`: Whether to sync all accounts or specific accounts.
+ - Service `twitter`: Whether to sync all accounts or specific accounts. Default value: `AllAccounts`.
+ - Service `twitter_ads`: Whether to sync all accounts or specific accounts.
+ - Service `walmart_dsp`: Whether to sync all accounts or specific accounts.
+ - Service `yahoo_gemini`: Whether to sync all accounts or specific accounts. Default value: `SpecificAccounts`.
+ - Service `zuora`: Select `Zuora Billing` to sync exclusively Zuora Billing related records. Choose `Zuora Revenue` for syncing only Zuora Revenue reports. If both Zuora Billing records and Zuora Revenue reports are to be synced, opt for `Both`.
+ - Service `zuora_sandbox`: Select `Zuora Billing` to sync exclusively Zuora Billing related records. Choose `Zuora Revenue` for syncing only Zuora Revenue reports. If both Zuora Billing records and Zuora Revenue reports are to be synced, opt for `Both`.
+- `sync_mode_advertiser` (String) Field usage depends on `service` value:
+ - Service `yahoo_dsp`: Whether to sync all advertisers or specific advertisers. Default value: `ALL_ADVERTISERS`.
+- `sync_mode_seat` (String) Field usage depends on `service` value:
+ - Service `yahoo_dsp`: Whether to sync all seats or specific seats. Default value: `ALL_SEATS`.
+- `sync_multiple_accounts` (Boolean) Field usage depends on `service` value:
+ - Service `reddit_ads`: When this parameter is set to `true`, we sync the data of the additional linked accounts. When this parameter is set to `false`, we sync only the data from the main account that was used for authorization.
+- `sync_pack_mode` (String) Field usage depends on `service` value:
+ - Service `cosmos`: The packing mode type. Supported values: `STANDARD_UNPACKED_MODE` - Unpacks _one_ layer of nested fields and infers types. `PACKED_MODE` - Delivers packed data as a single destination column value. Learn more in our [Azure Cosmos DB Sync Pack Mode Options documentation](https://fivetran.com/docs/connectors/databases/cosmos#packmodeoptions).
+ - Service `documentdb`: Indicates whether synced data will be packed into a single entry (column), or unpacked with one layer of nested fields.
+- `sync_pull_api` (Boolean) Field usage depends on `service` value:
+ - Service `appsflyer`: These options are for AppsFlyer's Pull API, and are only necessary for syncing events from the Pull API.
+- `sync_type` (String) Field usage depends on `service` value:
+ - Service `apache_kafka`: Kafka sync type. Unpacked messages must be valid JSON.
+ - Service `aws_msk`: The sync type. Unpacked messages must be valid JSON.
+ - Service `azure_event_hub`: Sync type. Unpacked messages must be valid JSON.
+ - Service `azure_service_bus`: The sync type, which is based on the message type. For `text` and `xml`, `packed` is supported. For `protobuf` and `avro`, `unpacked` is supported. For `json`, both `packed` and `unpacked` are supported
+ - Service `confluent_cloud`: Kafka sync type. Unpacked messages must be valid JSON.
+ - Service `heroku_kafka`: Heroku Kafka sync type. Unpacked messages must be valid JSON.
+ - Service `segment`: The Segment connector sync type.
+- `sysnr` (String)
+- `system_id` (String) Field usage depends on `service` value:
+ - Service `hana_sap_hva_ecc_netweaver`: Unique identifier `sapsid` of the SAP system. This field is displayed only when the REMOTE SERVICE IDENTIFICATION is set to System ID.
+ - Service `hana_sap_hva_s4_netweaver`: Unique identifier `sapsid` of the SAP system. This field is displayed only when the REMOTE SERVICE IDENTIFICATION is set to System ID.
+- `table_name` (String) Field usage depends on `service` value:
+ - Service `airtable`: Name of table in Airtable
+- `target_entity_id` (String) Field usage depends on `service` value:
+ - Service `culture_amp`: Your Culture Amp Target entity ID.
+- `target_host` (String) Field usage depends on `service` value:
+ - Service `d2l_brightspace`: Your D2L Brightspace target host.
+- `tde_certificate` (String, Sensitive) Field usage depends on `service` value:
+ - Service `sql_server_hva`: Certificate used to protect a database encryption key
+ - Service `sql_server_sap_ecc_hva`: Certificate used to protect a database encryption key
+- `tde_certificate_name` (String) Field usage depends on `service` value:
+ - Service `sql_server_hva`: Name of the Certificate used to protect a database encryption key
+ - Service `sql_server_sap_ecc_hva`: Name of the Certificate used to protect a database encryption key
+- `tde_password` (String, Sensitive) Field usage depends on `service` value:
+ - Service `sql_server_hva`: Password of the TDE private key
+ - Service `sql_server_sap_ecc_hva`: Password of the TDE private key
+- `tde_private_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `sql_server_hva`: Private key associated with the TDE certificate
+ - Service `sql_server_sap_ecc_hva`: Private key associated with the TDE certificate
+- `team_id` (String) Field usage depends on `service` value:
+ - Service `asana`: Team ID
+- `technical_account_id` (String) Field usage depends on `service` value:
+ - Service `adobe_analytics`: Technical Account ID from the Service Account (JWT) credentials of your Adobe Project.
+- `template_labels` (Set of String) Field usage depends on `service` value:
+ - Service `mandrill`: Provide the labels to filter the templates
+- `tenant` (String) Field usage depends on `service` value:
+ - Service `microsoft_entra_id`: Your Microsoft Entra ID Tenant.
+ - Service `microsoft_teams`: Your Microsoft Teams Tenant.
+ - Service `unicommerce`: Your Uniware tenant.
+ - Service `workday`: Workday tenant name
+ - Service `workday_financial_management`: Workday tenant name
+ - Service `workday_hcm`: Workday tenant name
+- `tenant_app_url` (String) Field usage depends on `service` value:
+ - Service `planful`: Your Planful tenant app URL.
+- `tenant_configs` (Block Set) (see [below for nested schema](#nestedblock--config--tenant_configs))
+- `tenant_id` (String, Sensitive) Field usage depends on `service` value:
+ - Service `azure_sql_db`: Azure AD tenant ID.
+ - Service `azure_sql_managed_db`: Azure AD tenant ID.
+ - Service `business_central`: `Tenant ID` of your Business Central application
+ - Service `crowddev`: Your crowd.dev Tenant ID.
+ - Service `reltio`: Your Reltio tenant ID.
+ - Service `servicetitan`: Your ServiceTitan tenant ID.
+ - Service `visma`: Your Visma tenant ID.
+- `tenant_name` (String) Field usage depends on `service` value:
+ - Service `mambu`: Your Mambu tenant name.
+- `tenant_url` (String) Field usage depends on `service` value:
+ - Service `ivanti`: Your Ivanti Tenant URL.
+ - Service `playvox_workforce_management`: Your Playvox Workforce Management Tenant URL.
+ - Service `reltio`: Your Reltio tenant URL.
+- `test_table_name` (String) Field usage depends on `service` value:
+ - Service `sap_hana`: testTableName
+ - Service `sap_s4hana`: testTableName
+- `time_zone` (String) Field usage depends on `service` value:
+ - Service `pardot`: The time zone configured in your Pardot instance. An empty value defaults to `UTC+00:00`.
+- `timeframe_months` (String) Field usage depends on `service` value:
+ - Service `adobe_analytics`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector has been created. Default value: `TWELVE`.
+ - Service `adroll`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `TWELVE`.
+ - Service `apple_search_ads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connection is created. NOTE: The more months of reporting data you sync, the longer your initial sync will take.
+ - Service `bingads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `SIX`.
+ - Service `criteo`: The number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. NOTE: The more months of reporting data you sync, the longer your initial sync will take.
+ - Service `double_click_campaign_manager`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync. Default value: `TWELVE`.
+ - Service `double_click_publishers`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync.
+ - Service `facebook`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`.
+ - Service `facebook_ads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`.
+ - Service `google_ads`: The number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled custom and prebuilt reports during the next connector sync. Default value: `TWELVE`.
+ - Service `google_analytics`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync. The default value: `TWELVE`.
+ - Service `google_analytics_4`: The number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync. The default value: `TWELVE`.
+ - Service `google_analytics_mcf`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync. Default value: `TWELVE`.
+ - Service `google_display_and_video_360`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync. NOTE: The more months of reporting data you sync, the longer your initial sync will take.
+ - Service `google_search_ads_360`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync.
+ - Service `google_search_console`: Number of months' worth of reporting data you'd like to include in your initial sync. A change of this value will trigger a re-sync for enabled reports during the next connector sync.
+ - Service `instagram_business`: Number of months' worth of data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `SIX`.
+ - Service `itunes_connect`: Historical sync time frame in months.
+ - Service `linkedin_ads`: Number of months for which to query reporting data included in the initial sync. This number cannot be modified once the connector is created. Default value: `ALL_TIME`.
+ - Service `outbrain`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connection is created. NOTE: The more months of reporting data you sync, the longer your initial sync will take.
+ - Service `pinterest_ads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`.
+ - Service `reddit_ads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `TWELVE`.
+ - Service `snapchat_ads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `TWELVE`.
+ - Service `spotify_ads`: The number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `SIX`.
+ - Service `taboola`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connection is created. NOTE: The more months of reporting data you sync, the longer your initial sync will take.
+ - Service `the_trade_desk`: Number of months' worth of data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`.
+ - Service `tiktok_ads`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`.
+ - Service `twitter`: Number of months' worth of data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`.
+ - Service `twitter_ads`: Historical sync timeframe in months.
+ - Service `walmart_dsp`: Number of months' worth of data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`.
+ - Service `yahoo_dsp`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `THREE`.
+ - Service `yahoo_gemini`: Number of months' worth of reporting data you'd like to include in your initial sync. This cannot be modified once the connector is created. Default value: `TWELVE`.
+- `tns` (String) Field usage depends on `service` value:
+ - Service `oracle_hva`: Single-tenant database: The database's SID. Multi-tenant database: The database's TNS.
+ - Service `oracle_sap_hva`: Single-tenant database: The database SID. Multi-tenant database: The database TNS.
+- `toast_id` (String) Field usage depends on `service` value:
+ - Service `toast`: Your Toast Restaurant External ID.
+- `token` (String, Sensitive) Field usage depends on `service` value:
+ - Service `mode`: Your Mode Token.
+ - Service `oracle_moat_analytics`: Your Oracle Moat Analytics Token.
+ - Service `solarwinds_service_desk`: Your SolarWinds Service Desk token.
+- `token_authenticated_container` (String) Field usage depends on `service` value:
+ - Service `cosmos`: The container name. Required for the `RESOURCE_TOKEN` data access method.
+- `token_authenticated_database` (String) Field usage depends on `service` value:
+ - Service `cosmos`: The database name. Required for the `RESOURCE_TOKEN` data access method.
+- `token_id` (String, Sensitive) Field usage depends on `service` value:
+ - Service `chargedesk`: Your ChargeDesk token ID.
+ - Service `mux`: Your Mux token ID
+- `token_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `netsuite_suiteanalytics`: Token ID
+ - Service `on24`: Your ON24 token key.
+ - Service `proofpoint_security_awareness`: Your Proofpoint Security Awareness Token Key.
+- `token_secret` (String, Sensitive) Field usage depends on `service` value:
+ - Service `netsuite_suiteanalytics`: Token Secret
+ - Service `on24`: Your ON24 token secret.
+- `token_secret_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `mux`: Your Mux token secret key
+- `topics` (Set of String) Field usage depends on `service` value:
+ - Service `azure_service_bus`: The comma-separated list of topics which should be synced. Required if you do not have manage permissions
+- `trust_store_type` (String) Field usage depends on `service` value:
+ - Service `heroku_kafka`: Trust Store Type
+- `trusted_cert` (String, Sensitive) Field usage depends on `service` value:
+ - Service `apache_kafka`: Kafka trusted certificate.
+ - Service `heroku_kafka`: Heroku Kafka trusted certificate. Required for `TLS` security protocol.
+- `truststore` (String, Sensitive) Field usage depends on `service` value:
+ - Service `aws_msk`: If `security_protocol` is set to `TLS`, add the `Truststore File` as a Base64 encoded string.
+- `tunnel_host` (String) Field usage depends on `service` value:
+ - Service `aurora`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer).
+ - Service `aurora_postgres`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer).
+ - Service `azure_cosmos_for_mongo`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer).
+ - Service `azure_postgres`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `azure_sql_db`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `azure_sql_managed_db`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `clarity`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `cockroachdb`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `db2i_hva`: SSH host, only specify when connecting via an SSH tunnel (do not use a load balancer). Required for connector creation. + - Service `db2i_sap_hva`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `documentdb`: SSH host, only specify when connecting via an SSH tunnel (do not use a load balancer). Required for connector creation. + - Service `dynamics_365_fo`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `ehr`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `elastic_cloud`: SSH host, specify only to connect using an SSH tunnel (do not use a load balancer). + - Service `es_self_hosted`: SSH host, specify only to connect using an SSH tunnel (do not use a load balancer). + - Service `google_cloud_mysql`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `google_cloud_postgresql`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `google_cloud_sqlserver`: SSH host, only specify when connecting via an SSH tunnel (do not use a load balancer). + - Service `hana_sap_hva_b1`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `hana_sap_hva_ecc`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `hana_sap_hva_ecc_netweaver`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `hana_sap_hva_s4`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `hana_sap_hva_s4_netweaver`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `heroku_postgres`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `magento_mysql`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `magento_mysql_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `maria`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `maria_azure`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `maria_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `mongo`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `mongo_sharded`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `mysql`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `mysql_azure`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `mysql_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). 
+ - Service `opendistro`: SSH host, specify only to connect using an SSH tunnel (do not use a load balancer). + - Service `opensearch`: SSH host, specify only to connect using an SSH tunnel (do not use a load balancer). + - Service `oracle`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `oracle_ebs`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `oracle_hva`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `oracle_rac`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `oracle_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `oracle_sap_hva`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `oracle_sap_hva_netweaver`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `postgres`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `postgres_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `sap_hana`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `sap_hana_db`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `sap_s4hana`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `sftp`: Tunnel host address, specify only to connect via SSH tunnel. + - Service `sql_server`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `sql_server_hva`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `sql_server_rds`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). + - Service `sql_server_sap_ecc_hva`: SSH host, specify only to connect via an SSH tunnel (do not use a load balancer). +- `tunnel_port` (Number) Field usage depends on `service` value: + - Service `aurora`: SSH port, specify only to connect via an SSH tunnel. + - Service `aurora_postgres`: SSH port, specify only to connect via an SSH tunnel. + - Service `azure_cosmos_for_mongo`: SSH port, specify only to connect via an SSH tunnel. + - Service `azure_postgres`: SSH port, specify only to connect via an SSH tunnel. + - Service `azure_sql_db`: SSH port, specify only to connect via an SSH tunnel. + - Service `azure_sql_managed_db`: SSH port, specify only to connect via an SSH tunnel. + - Service `clarity`: SSH port, specify only to connect via an SSH tunnel. + - Service `cockroachdb`: SSH port, specify only to connect via an SSH tunnel. + - Service `db2i_hva`: SSH port, only specify when connecting via an SSH tunnel. Required for connector creation. + - Service `db2i_sap_hva`: SSH port, specify only to connect via an SSH tunnel. + - Service `documentdb`: SSH port, only specify when connecting via an SSH tunnel. Required for connector creation. + - Service `dynamics_365_fo`: SSH port, specify only to connect via an SSH tunnel. + - Service `ehr`: SSH port, specify only to connect via an SSH tunnel. + - Service `elastic_cloud`: SSH port, specify only to connect using an SSH tunnel. + - Service `es_self_hosted`: SSH port, specify only to connect using an SSH tunnel. + - Service `google_cloud_mysql`: SSH port, specify only to connect via an SSH tunnel. 
+ - Service `google_cloud_postgresql`: SSH port, specify only to connect via an SSH tunnel. + - Service `google_cloud_sqlserver`: SSH port, only specify when connecting via an SSH tunnel. + - Service `hana_sap_hva_b1`: SSH port, specify only to connect via an SSH tunnel. + - Service `hana_sap_hva_ecc`: SSH port, specify only to connect via an SSH tunnel. + - Service `hana_sap_hva_ecc_netweaver`: SSH port, specify only to connect via an SSH tunnel. + - Service `hana_sap_hva_s4`: SSH port, specify only to connect via an SSH tunnel. + - Service `hana_sap_hva_s4_netweaver`: SSH port, specify only to connect via an SSH tunnel. + - Service `heroku_postgres`: SSH port, specify only to connect via an SSH tunnel. + - Service `magento_mysql`: SSH port, specify only to connect via an SSH tunnel. + - Service `magento_mysql_rds`: SSH port, specify only to connect via an SSH tunnel. + - Service `maria`: SSH port, specify only to connect via an SSH tunnel. + - Service `maria_azure`: SSH port, specify only to connect via an SSH tunnel. + - Service `maria_rds`: SSH port, specify only to connect via an SSH tunnel. + - Service `mongo`: SSH port, specify only to connect via an SSH tunnel. + - Service `mongo_sharded`: SSH port, specify only to connect via an SSH tunnel. + - Service `mysql`: SSH port, specify only to connect via an SSH tunnel. + - Service `mysql_azure`: SSH port, specify only to connect via an SSH tunnel. + - Service `mysql_rds`: SSH port, specify only to connect via an SSH tunnel. + - Service `opendistro`: SSH port, specify only to connect using an SSH tunnel. + - Service `opensearch`: SSH port, specify only to connect using an SSH tunnel. + - Service `oracle`: SSH port, specify only to connect via an SSH tunnel. + - Service `oracle_ebs`: SSH port, specify only to connect via an SSH tunnel. + - Service `oracle_hva`: SSH port, specify only to connect via an SSH tunnel. + - Service `oracle_rac`: SSH port, specify only to connect via an SSH tunnel. + - Service `oracle_rds`: SSH port, specify only to connect via an SSH tunnel. + - Service `oracle_sap_hva`: SSH port, specify only to connect via an SSH tunnel. + - Service `oracle_sap_hva_netweaver`: SSH port, specify only to connect via an SSH tunnel. + - Service `postgres`: SSH port, specify only to connect via an SSH tunnel. + - Service `postgres_rds`: SSH port, specify only to connect via an SSH tunnel. + - Service `sap_hana`: SSH port, specify only to connect via an SSH tunnel. + - Service `sap_hana_db`: SSH port, specify only to connect via an SSH tunnel. + - Service `sap_s4hana`: SSH port, specify only to connect via an SSH tunnel. + - Service `sftp`: Tunnel port, specify only to connect via SSH tunnel. + - Service `sql_server`: SSH port, specify only to connect via an SSH tunnel. + - Service `sql_server_hva`: SSH port, specify only to connect via an SSH tunnel. + - Service `sql_server_rds`: SSH port, specify only to connect via an SSH tunnel. + - Service `sql_server_sap_ecc_hva`: SSH port, specify only to connect via an SSH tunnel. +- `tunnel_user` (String) Field usage depends on `service` value: + - Service `aurora`: SSH user, specify only to connect via an SSH tunnel. + - Service `aurora_postgres`: SSH user, specify only to connect via an SSH tunnel. + - Service `azure_cosmos_for_mongo`: SSH user, specify only to connect via an SSH tunnel. + - Service `azure_postgres`: SSH user, specify only to connect via an SSH tunnel. + - Service `azure_sql_db`: SSH user, specify only to connect via an SSH tunnel. 
+ - Service `azure_sql_managed_db`: SSH user, specify only to connect via an SSH tunnel. + - Service `clarity`: SSH user, specify only to connect via an SSH tunnel. + - Service `cockroachdb`: SSH user, specify only to connect via an SSH tunnel. + - Service `db2i_hva`: SSH user, specify only to connect via an SSH tunnel. Required for connector creation. + - Service `db2i_sap_hva`: SSH user, specify only to connect via an SSH tunnel. + - Service `documentdb`: SSH user, specify only to connect via an SSH tunnel. Required for connector creation. + - Service `dynamics_365_fo`: SSH user, specify only to connect via an SSH tunnel. + - Service `ehr`: SSH user, specify only to connect via an SSH tunnel. + - Service `elastic_cloud`: SSH user, specify only to connect using an SSH tunnel. + - Service `es_self_hosted`: SSH user, specify only to connect using an SSH tunnel. + - Service `google_cloud_mysql`: SSH user, specify only to connect via an SSH tunnel. + - Service `google_cloud_postgresql`: SSH user, specify only to connect via an SSH tunnel. + - Service `google_cloud_sqlserver`: SSH user, only specify when connecting via an SSH tunnel. + - Service `hana_sap_hva_b1`: SSH user, specify only to connect via an SSH tunnel. + - Service `hana_sap_hva_ecc`: SSH user, specify only to connect via an SSH tunnel. + - Service `hana_sap_hva_ecc_netweaver`: SSH user, specify only to connect via an SSH tunnel. + - Service `hana_sap_hva_s4`: SSH user, specify only to connect via an SSH tunnel. + - Service `hana_sap_hva_s4_netweaver`: SSH user, specify only to connect via an SSH tunnel. + - Service `heroku_postgres`: SSH user, specify only to connect via an SSH tunnel. + - Service `magento_mysql`: SSH user, specify only to connect via an SSH tunnel. + - Service `magento_mysql_rds`: SSH user, specify only to connect via an SSH tunnel. + - Service `maria`: SSH user, specify only to connect via an SSH tunnel. + - Service `maria_azure`: SSH user, specify only to connect via an SSH tunnel. + - Service `maria_rds`: SSH user, specify only to connect via an SSH tunnel. + - Service `mongo`: SSH user, specify only to connect via an SSH tunnel. + - Service `mongo_sharded`: SSH user, specify only to connect via an SSH tunnel. + - Service `mysql`: SSH user, specify only to connect via an SSH tunnel. + - Service `mysql_azure`: SSH user, specify only to connect via an SSH tunnel. + - Service `mysql_rds`: SSH user, specify only to connect via an SSH tunnel. + - Service `opendistro`: SSH user, specify only to connect using an SSH tunnel. + - Service `opensearch`: SSH user, specify only to connect using an SSH tunnel. + - Service `oracle`: SSH user, specify only to connect via an SSH tunnel. + - Service `oracle_ebs`: SSH user, specify only to connect via an SSH tunnel. + - Service `oracle_hva`: SSH user, specify only to connect via an SSH tunnel. + - Service `oracle_rac`: SSH user, specify only to connect via an SSH tunnel. + - Service `oracle_rds`: SSH user, specify only to connect via an SSH tunnel. + - Service `oracle_sap_hva`: SSH user, specify only to connect via an SSH tunnel. + - Service `oracle_sap_hva_netweaver`: SSH user, specify only to connect via an SSH tunnel. + - Service `postgres`: SSH user, specify only to connect via an SSH tunnel. + - Service `postgres_rds`: SSH user, specify only to connect via an SSH tunnel. + - Service `sap_hana`: SSH user, specify only to connect via an SSH tunnel. + - Service `sap_hana_db`: SSH user, specify only to connect via an SSH tunnel. 
+ - Service `sap_s4hana`: SSH user, specify only to connect via an SSH tunnel. + - Service `sftp`: Tunnel user, specify only to connect via SSH tunnel. + - Service `sql_server`: SSH user, specify only to connect via an SSH tunnel. + - Service `sql_server_hva`: SSH user, specify only to connect via an SSH tunnel. + - Service `sql_server_rds`: SSH user, specify only to connect via an SSH tunnel. + - Service `sql_server_sap_ecc_hva`: SSH user, specify only to connect via an SSH tunnel. +- `type_name` (String) Field usage depends on `service` value: + - Service `akamai`: Your Akamai type name. + - Service `bubble`: Your Bubble type name. +- `unique_id` (String) +- `update_config_on_each_sync` (Boolean) Field usage depends on `service` value: + - Service `google_display_and_video_360`: Specifies whether the configuration is updated before each sync or only when the connector settings are saved. This parameter only takes effect when `config_method` is set to `REUSE_EXISTING`. The default value is `true`. +- `update_method` (String) Field usage depends on `service` value: + - Service `aurora`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `aurora_postgres`: The method to detect new or changed rows. Specify only for `"service": "postgres"` or `"service": "postgres_rds"`. Supported values:`WAL` - this method replicates new, changed and deleted rows by tailing the write-ahead log (WAL) via a logical slot. This is more efficient than the XMIN method, but requires more setup and monitoring.`XMIN` - this method detects new or changed rows via the XMIN system column, but is not capable of detecting deleted rows.`WAL_PGOUTPUT` - logical replication of the WAL using the pgoutput plugin. This method replicates new, changed, and deleted rows by tailing the write-ahead log (WAL) using a logical slot.`TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `azure_postgres`: The method to detect new or changed rows. Specify only for `"service": "postgres"` or `"service": "postgres_rds"`. Supported values:`WAL` - this method replicates new, changed and deleted rows by tailing the write-ahead log (WAL) via a logical slot. This is more efficient than the XMIN method, but requires more setup and monitoring.`XMIN` - this method detects new or changed rows via the XMIN system column, but is not capable of detecting deleted rows.`WAL_PGOUTPUT` - logical replication of the WAL using the pgoutput plugin. This method replicates new, changed, and deleted rows by tailing the write-ahead log (WAL) using a logical slot.`TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `azure_sql_db`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. + - Service `azure_sql_managed_db`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. 
+ - Service `clarity`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. + - Service `dynamics_365_fo`: Update Method + - Service `ehr`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. + - Service `google_cloud_mysql`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `google_cloud_postgresql`: The method to detect new or changed rows. Specify only for `"service": "postgres"` or `"service": "postgres_rds"`. Supported values:`WAL` - this method replicates new, changed and deleted rows by tailing the write-ahead log (WAL) via a logical slot. This is more efficient than the XMIN method, but requires more setup and monitoring.`XMIN` - this method detects new or changed rows via the XMIN system column, but is not capable of detecting deleted rows.`WAL_PGOUTPUT` - logical replication of the WAL using the pgoutput plugin. This method replicates new, changed, and deleted rows by tailing the write-ahead log (WAL) using a logical slot.`TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `google_cloud_sqlserver`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. + - Service `heroku_postgres`: The method to detect new or changed rows. Specify only for `"service": "postgres"` or `"service": "postgres_rds"`. Supported values:`WAL` - this method replicates new, changed and deleted rows by tailing the write-ahead log (WAL) via a logical slot. This is more efficient than the XMIN method, but requires more setup and monitoring.`XMIN` - this method detects new or changed rows via the XMIN system column, but is not capable of detecting deleted rows.`WAL_PGOUTPUT` - logical replication of the WAL using the pgoutput plugin. This method replicates new, changed, and deleted rows by tailing the write-ahead log (WAL) using a logical slot.`TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `magento_mysql`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `magento_mysql_rds`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. 
+ - Service `maria`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `maria_azure`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `maria_rds`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `mysql`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `mysql_azure`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `mysql_rds`: The method to detect new or changed rows. Supported values:`BINLOG` - Fivetran uses your binary logs (also called binlogs) to request only the data that has changed since our last sync. This is the default value if no value is specified. `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `oracle`: The method used to detect new or changed rows. Supported values: - `LOGMINER` - Fivetran uses LogMiner, a utility that is part of Oracle Database, to detect modified rows in the source tables. - `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `oracle_ebs`: The method used to detect new or changed rows. Supported values: - `LOGMINER` - Fivetran uses LogMiner, a utility that is part of Oracle Database, to detect modified rows in the source tables. - `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `oracle_hva`: The method used to detect new or changed rows. Supported values: - `LOGMINER` - Fivetran uses LogMiner, a utility that is part of Oracle Database, to detect modified rows in the source tables. - `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `oracle_rac`: The method used to detect new or changed rows. Supported values: - `LOGMINER` - Fivetran uses LogMiner, a utility that is part of Oracle Database, to detect modified rows in the source tables. 
- `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `oracle_rds`: The method used to detect new or changed rows. Supported values: - `LOGMINER` - Fivetran uses LogMiner, a utility that is part of Oracle Database, to detect modified rows in the source tables. - `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `oracle_sap_hva`: The method used to detect new or changed rows. Supported values: - `LOGMINER` - Fivetran uses LogMiner, a utility that is part of Oracle Database, to detect modified rows in the source tables. - `TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `postgres`: The method to detect new or changed rows. Specify only for `"service": "postgres"` or `"service": "postgres_rds"`. Supported values:`WAL` - this method replicates new, changed and deleted rows by tailing the write-ahead log (WAL) via a logical slot. This is more efficient than the XMIN method, but requires more setup and monitoring.`XMIN` - this method detects new or changed rows via the XMIN system column, but is not capable of detecting deleted rows.`WAL_PGOUTPUT` - logical replication of the WAL using the pgoutput plugin. This method replicates new, changed, and deleted rows by tailing the write-ahead log (WAL) using a logical slot.`TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `postgres_rds`: The method to detect new or changed rows. Specify only for `"service": "postgres"` or `"service": "postgres_rds"`. Supported values:`WAL` - this method replicates new, changed and deleted rows by tailing the write-ahead log (WAL) via a logical slot. This is more efficient than the XMIN method, but requires more setup and monitoring.`XMIN` - this method detects new or changed rows via the XMIN system column, but is not capable of detecting deleted rows.`WAL_PGOUTPUT` - logical replication of the WAL using the pgoutput plugin. This method replicates new, changed, and deleted rows by tailing the write-ahead log (WAL) using a logical slot.`TELEPORT` - Fivetran's proprietary replication method that uses compressed snapshots to detect and apply changes. + - Service `redshift_db`: Default value: `Teleport` + - Service `snowflake_db`: Default value: `Teleport` + - Service `sql_server`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. + - Service `sql_server_hva`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. + - Service `sql_server_rds`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. + - Service `sql_server_sap_ecc_hva`: (Optional) The incremental update method the connector will use. The possible values are `"TELEPORT"` or `"NATIVE_UPDATE"`. The type defaults to `"NATIVE_UPDATE"` if the value is set to `null` or not specified. +- `uri` (String) Field usage depends on `service` value: + - Service `cosmos`: Cosmos resource instance address. 
+- `url_format` (String) Field usage depends on `service` value:
+ - Service `fountain`: Your Fountain URL format.
+- `use_api_keys` (Boolean) Field usage depends on `service` value:
+ - Service `mandrill`: Whether to use multiple API keys for interaction.
+- `use_customer_bucket` (Boolean) Field usage depends on `service` value:
+ - Service `appsflyer`: Use Custom Bucket. Set it to `true` if the data is being synced to your S3 bucket instead of an AppsFlyer-managed bucket.
+- `use_oracle_rac` (Boolean) Field usage depends on `service` value:
+ - Service `oracle_hva`: Default value: `false`. Set to `true` if you're using a RAC instance.
+ - Service `oracle_sap_hva`: Default value: `false`. Set to `true` if you're using a RAC instance.
+- `use_pgp_encryption_options` (Boolean) Field usage depends on `service` value:
+ - Service `azure_blob_storage`: Set to `true` if files present in the Azure Blob Storage container are encrypted using PGP. Default value: `false`.
+ - Service `ftp`: Set to `true` if files present on the FTP server are encrypted using PGP. Default value: `false`.
+ - Service `gcs`: Set to `true` if files are encrypted using PGP in the GCS bucket. Default value: `false`.
+ - Service `s3`: Set to `true` if files are encrypted using PGP in the S3 bucket. Default value: `false`.
+ - Service `sftp`: Set to `true` if files present on the SFTP server are encrypted using PGP. Default value: `false`.
+ - Service `wasabi_cloud_storage`: Set to `true` if files are encrypted using PGP in the Wasabi Cloud Storage bucket. Default value: `false`.
+- `use_service_account` (Boolean) Field usage depends on `service` value:
+ - Service `bigquery_db`: BigQuery use service account; default is `false`.
+- `use_template_labels` (Boolean) Field usage depends on `service` value:
+ - Service `mandrill`: Use template labels to filter templates for sync.
+- `use_webhooks` (Boolean) Field usage depends on `service` value:
+ - Service `github`: Set to `true` to capture deletes.
+ - Service `xero`: Updates to a few fields, like `sent_to_contact` in the Invoice table, might be missed if you don't enable this.
+- `use_workspace` (Boolean) Field usage depends on `service` value:
+ - Service `bigquery_db`: Create and drop tables in a query results dataset. Default is `false`.
+ - Service `snowflake_db`: Choose a database and schema to create temporary tables for syncs.
+- `user` (String) Field usage depends on `service` value:
+ - Service `aurora`: The user name.
+ - Service `aurora_postgres`: The user name.
+ - Service `azure_cosmos_for_mongo`: Username for source database access.
+ - Service `azure_postgres`: The user name.
+ - Service `azure_sql_db`: The user name. For Azure Databases, the format must be `user@domain`.
+ - Service `azure_sql_managed_db`: The user name. For Azure Databases, the format must be `user@domain`.
+ - Service `clarity`: The user name. For Azure Databases, the format must be `user@domain`.
+ - Service `cockroachdb`: The user name.
+ - Service `db2i_hva`: The user name.
+ - Service `db2i_sap_hva`: The username.
+ - Service `documentdb`: The user name.
+ - Service `dynamics_365_fo`: The user name. The format must be `user@domain`.
+ - Service `ehr`: The user name. For Azure Databases, the format must be `user@domain`.
+ - Service `elastic_cloud`: The user name.
+ - Service `es_self_hosted`: The user name.
+ - Service `ftp`: FTP user.
+ - Service `google_cloud_mysql`: The user name.
+ - Service `google_cloud_postgresql`: The user name.
+ - Service `google_cloud_sqlserver`: The user name.
For Azure Databases, the format must be `user@domain`. + - Service `hana_sap_hva_b1`: The username. + - Service `hana_sap_hva_ecc`: The username. + - Service `hana_sap_hva_ecc_netweaver`: The username. + - Service `hana_sap_hva_s4`: The username. + - Service `hana_sap_hva_s4_netweaver`: The username. + - Service `heroku_postgres`: The user name. + - Service `jira`: The Jira username. + - Service `magento_mysql`: The user name. + - Service `magento_mysql_rds`: The user name. + - Service `maria`: The user name. + - Service `maria_azure`: The user name. + - Service `maria_rds`: The user name. + - Service `marin`: The Marin username. + - Service `mongo`: The user name. + - Service `mongo_sharded`: The user name. + - Service `mysql`: The user name. + - Service `mysql_azure`: The user name. + - Service `mysql_rds`: The user name. + - Service `opendistro`: The user name. + - Service `opensearch`: The user name. + - Service `oracle`: The user name. + - Service `oracle_ebs`: The user name. + - Service `oracle_hva`: The user name. + - Service `oracle_rac`: The user name. + - Service `oracle_rds`: The user name. + - Service `oracle_sap_hva`: The username. + - Service `oracle_sap_hva_netweaver`: The username. + - Service `outbrain`: The username or email of the Outbrain user. + - Service `postgres`: The user name. + - Service `postgres_rds`: The user name. + - Service `redshift_db`: The Redshift username. + - Service `sap_hana`: Your SAP HANA user name. + - Service `sap_s4hana`: Your SAP S/4 user name. + - Service `sftp`: SFTP user. + - Service `snowflake_db`: The Snowflake username. + - Service `splunk`: The Splunk username. + - Service `sql_server`: The user name. For Azure Databases, the format must be `user@domain`. + - Service `sql_server_hva`: The user name. For Azure Databases, the format must be `user@domain`. + - Service `sql_server_rds`: The user name. For Azure Databases, the format must be `user@domain`. + - Service `sql_server_sap_ecc_hva`: The user name. For Azure Databases, the format must be `user@domain`. +- `user_id` (String) Field usage depends on `service` value: + - Service `coassemble`: Your Coassemble user ID. + - Service `gmail`: Your Gmail user ID. + - Service `hibob`: Your HiBob Service User Token. + - Service `marketo`: Marketo SOAP API User Id. + - Service `okendo`: Your Okendo user ID. + - Service `playvox`: Your Playvox User ID. + - Service `sage_intacct`: User ID +- `user_key` (String, Sensitive) +- `user_name` (String) Field usage depends on `service` value: + - Service `workday`: Workday username. +- `user_profiles` (Set of String) Field usage depends on `service` value: + - Service `double_click_campaign_manager`: IDs of specific User Profiles to sync. Must be populated if `sync_mode` is set to `SpecificAccounts`. +- `user_token` (String, Sensitive) Field usage depends on `service` value: + - Service `hibob`: Your HiBob Service User ID. + - Service `konnect_insights`: Your Konnect Insights User Token. + - Service `sonarqube`: Your Sonarqube user token. +- `username` (String) Field usage depends on `service` value: + - Service `absorb_lms`: Your Absorb LMS username. + - Service `adobe_commerce`: Your Adobe Commerce username. + - Service `anaplan`: Your Anaplan user ID. Must be populated if `auth_mode` is set to `Basic`. + - Service `appfigures`: Your Appfigures Username. + - Service `ceridian_dayforce`: Your Ceridian Dayforce Username. + - Service `churnzero`: Your ChurnZero username. + - Service `cin7`: Your Cin7 API Username. 
+ - Service `collibra`: Your Collibra username.
+ - Service `concur`: The SAP Concur username.
+ - Service `confluence`: Your Confluence username.
+ - Service `contrast_security`: Your Contrast Security API Username.
+ - Service `dcl_logistics`: Your DCL Logistics username.
+ - Service `github`: `Login` of your GitHub profile.
+ - Service `gladly`: Your Gladly Username.
+ - Service `globalmeet`: Your GlobalMeet Username.
+ - Service `gorgias`: Your Gorgias username.
+ - Service `green_power_monitor`: Your GreenPowerMonitor username.
+ - Service `guru`: Your Guru username.
+ - Service `impact`: Your Impact Account SID.
+ - Service `integral_ad_science`: Your Integral Ad Science username.
+ - Service `itunes_connect`: Your Apple ID.
+ - Service `jamf`: Your Jamf username.
+ - Service `khoros_care`: Your Khoros Care username.
+ - Service `kissmetrics`: Your Kissmetrics API Username.
+ - Service `klarna`: Your Klarna Username.
+ - Service `learnupon`: Your Learnupon username.
+ - Service `lessonly`: Your Lessonly username.
+ - Service `mailgun`: Your Mailgun API username.
+ - Service `myosh`: Your myosh username.
+ - Service `oracle_business_intelligence_publisher`: The Oracle Business Intelligence username.
+ - Service `oracle_fusion_cloud_apps_crm`: The Oracle Fusion Cloud username.
+ - Service `oracle_fusion_cloud_apps_fscm`: The Oracle Fusion Cloud username.
+ - Service `oracle_fusion_cloud_apps_hcm`: The Oracle Fusion Cloud username.
+ - Service `partnerize`: Your Partnerize account's username.
+ - Service `pingdom`: Your Pingdom Username.
+ - Service `podio`: Your Podio username.
+ - Service `quorum`: Your Quorum username.
+ - Service `revx`: Your RevX Username.
+ - Service `rtb_house`: Your RTB House username.
+ - Service `sap_business_by_design`: The SAP Business ByDesign username.
+ - Service `scorm`: Your Scorm App ID.
+ - Service `servicenow`: Your ServiceNow User ID (username).
+ - Service `shiphero`: Your ShipHero username.
+ - Service `shipstation`: Your ShipStation username.
+ - Service `shopware`: Your Shopware username.
+ - Service `splash`: Your Splash username.
+ - Service `starrez`: Your StarRez API username.
+ - Service `stylight`: Your Stylight Username.
+ - Service `teamwork`: Your Teamwork username.
+ - Service `testrail`: Your TestRail username.
+ - Service `ukg_pro`: Your UKG Pro username.
+ - Service `unicommerce`: Your Uniware login username.
+ - Service `upland`: Your Upland Software Username.
+ - Service `veevavault`: Your Veeva Vault username.
+ - Service `when_i_work`: Your When I Work username.
+ - Service `wherefour`: Your Wherefour username.
+ - Service `workday_financial_management`: Workday username.
+ - Service `workday_hcm`: Username of your Workday Integration System User account.
+ - Service `xandr`: Your Xandr username.
+ - Service `younium`: Your Younium username.
+- `version` (String) Field usage depends on `service` value:
+ - Service `criteo_retail_media`: Your Criteo Retail Media version.
+- `view_attribution_window` (String) Field usage depends on `service` value:
+ - Service `facebook`: Time period to attribute conversions based on views. [Possible view_attribution_window values](https://fivetran.com/docs/applications/facebook-ad-insights/api-config#viewattributionwindow).
+ - Service `pinterest_ads`: The number of days to use as the conversion attribution window for a 'view' action.
+ - Service `snapchat_ads`: The time period to attribute conversions based on views.
Default value: `DAY_1` +- `view_through_attribution_window_size` (String) Field usage depends on `service` value: + - Service `linkedin_ads`: The time period to attribute conversions based on views. Default value: `DAY_7` +- `webhook_endpoint` (String) Field usage depends on `service` value: + - Service `appsflyer`: Webhook Url +- `webhook_key` (String) Field usage depends on `service` value: + - Service `xero`: Webhook Key +- `webhook_url` (String) Field usage depends on `service` value: + - Service `branch`: Webhook URL + - Service `pipedrive`: The registered URL for webhooks in your Pipedrive dashboard. + - Service `segment`: Webhook URL. + - Service `xero`: (ReadOnly) The Webhook URL generated by Fivetran. You can configure this in XERO. +- `word_press_site_id_or_woocommerce_domain_name` (String) Field usage depends on `service` value: + - Service `woocommerce`: The Site ID of your WordPress hosted WooCommerce instance or the subdomain of your self-hosted WooCommerce instance. +- `workplace_id` (String) Field usage depends on `service` value: + - Service `moloco`: Your Moloco workplace ID. +- `workspace` (String) Field usage depends on `service` value: + - Service `mode`: Your Mode Workspace. +- `workspace_name` (String) Field usage depends on `service` value: + - Service `bigquery_db`: Workspace Dataset Name + - Service `snowflake_db`: The name of the database where the temporary tables will be created. +- `workspace_same_as_source` (Boolean) Field usage depends on `service` value: + - Service `bigquery_db`: Use the source dataset as the workspace dataset +- `workspace_schema` (String) Field usage depends on `service` value: + - Service `snowflake_db`: The name of the schema that belongs to the workspace database where the temporary tables will be created. +- `ws_certificate` (String, Sensitive) Field usage depends on `service` value: + - Service `adp_workforce_now`: Web Services Certificate. +- `x_api_key` (String, Sensitive) Field usage depends on `service` value: + - Service `workday_strategic_sourcing`: Your Workday Strategic Sourcing X API key. +- `x_key` (String, Sensitive) Field usage depends on `service` value: + - Service `medallia_agile_research`: Your Medallia Agile Research key. +- `x_master_key` (String, Sensitive) Field usage depends on `service` value: + - Service `medallia_agile_research`: Your Medallia Agile Research master key. +- `x_user_email` (String) Field usage depends on `service` value: + - Service `workday_strategic_sourcing`: Your Workday Strategic Sourcing X User Email. +- `x_user_token` (String, Sensitive) Field usage depends on `service` value: + - Service `workday_strategic_sourcing`: Your Workday Strategic Sourcing X User Token. + +Read-Only: + +- `authorization_method` (String) +- `last_synced_changes__utc_` (String) +- `latest_version` (String) +- `service_version` (String) +- `subscriber_name` (String) Field usage depends on `service` value: + - Service `azure_service_bus`: The subscriber name. If the connection string does not have manage permission, you need to specify a subscriber name we can use to fetch data. If not specified, we default to `fivetran_sub_schema` + + +### Nested Schema for `config.accounts_reddit_ads` + +Optional: + +- `name` (String) Field usage depends on `service` value: + - Service `reddit_ads`: Reddit username of the additional linked account. 
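+
+For orientation, here is a minimal, hypothetical sketch of how this block could appear inside a connector definition. The resource label, group reference, and account username are placeholders rather than values from this document, and the exact nesting syntax may differ between provider versions:
+
+```hcl
+resource "fivetran_connector" "reddit_ads" {
+  group_id = fivetran_group.my_group.id
+  service  = "reddit_ads"
+
+  destination_schema {
+    name = "reddit_ads"
+  }
+
+  config {
+    # Additional linked Reddit Ads account to sync (placeholder username)
+    accounts_reddit_ads {
+      name = "my_linked_reddit_account"
+    }
+  }
+}
+```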
+
+
+
+### Nested Schema for `config.adobe_analytics_configurations`
+
+Optional:
+
+- `calculated_metrics` (Set of String) Field usage depends on `service` value:
+ - Service `adobe_analytics`: The calculated_metrics that you want to sync.
+- `elements` (Set of String) Field usage depends on `service` value:
+ - Service `adobe_analytics`: The elements that you want to sync.
+- `metrics` (Set of String) Field usage depends on `service` value:
+ - Service `adobe_analytics`: The metrics that you want to sync.
+- `report_suites` (Set of String) Field usage depends on `service` value:
+ - Service `adobe_analytics`: Specific report suites to sync. Must be populated if `sync_mode` is set to `SpecificReportSuites`.
+- `segments` (Set of String) Field usage depends on `service` value:
+ - Service `adobe_analytics`: The segments that you want to sync.
+- `sync_mode` (String) Field usage depends on `service` value:
+ - Service `adobe_analytics`: Whether to sync all report suites or specific report suites. Default value: `AllReportSuites`.
+- `table` (String) Field usage depends on `service` value:
+ - Service `adobe_analytics`: The table name unique within the schema to which connector will sync the data. Required for connector creation.
+
+
+
+### Nested Schema for `config.app_ids_appsflyer`
+
+Optional:
+
+- `app_id` (String) Field usage depends on `service` value:
+ - Service `appsflyer`: Your App ID.
+
+
+
+### Nested Schema for `config.custom_payloads`
+
+Optional:
+
+- `key` (String) Field usage depends on `service` value:
+ - Service `aws_lambda`: Payload Key
+ - Service `azure_function`: Payload Key
+ - Service `google_cloud_function`: Payload Key
+- `value` (String) Field usage depends on `service` value:
+ - Service `aws_lambda`: Payload Value
+ - Service `azure_function`: Payload Value
+ - Service `google_cloud_function`: Payload Value
+
+
+
+### Nested Schema for `config.custom_reports`
+
+Optional:
+
+- `add_metric_variants` (Boolean) Field usage depends on `service` value:
+ - Service `snapchat_ads`: Add fields for separate \"swipe-up\" and \"view\" variants of selected metrics
+- `aggregate` (String) Field usage depends on `service` value:
+ - Service `tiktok_ads`: Time aggregation of report
+- `base_metrics_fields` (Set of String) Field usage depends on `service` value:
+ - Service `snapchat_ads`: [List of Core, Additional and Conversion Metrics Stats Fields](https://fivetran.com/docs/connectors/applications/snapchat-ads/custom-reports#basemetricsfields).
+- `breakdown` (String) Field usage depends on `service` value:
+ - Service `snapchat_ads`: [Sets Breakdown on custom report](https://fivetran.com/docs/connectors/applications/snapchat-ads/custom-reports#breakdown).
+- `breakout` (String) Field usage depends on `service` value:
+ - Service `snapchat_ads`: [Sets Breakout on custom report](https://fivetran.com/docs/connectors/applications/snapchat-ads/custom-reports#breakout).
+- `conversions_report_included` (Boolean) Field usage depends on `service` value:
+ - Service `reddit_ads`: The boolean value specifying whether to enable or disable event conversions data synchronisation. Default value: `false`
+- `custom_events_included` (Boolean) Field usage depends on `service` value:
+ - Service `reddit_ads`: The boolean value specifying whether the custom events are included in event conversions report.
Default value: `false`
+- `dimension` (String) Field usage depends on `service` value:
+ - Service `snapchat_ads`: [Sets Dimension on custom report](https://fivetran.com/docs/connectors/applications/snapchat-ads/custom-reports#dimension).
+- `dimensions` (Set of String) Field usage depends on `service` value:
+ - Service `tiktok_ads`: Dimensions to be synced
+- `event_names` (Set of String) Field usage depends on `service` value:
+ - Service `reddit_ads`: The list of events the conversion data will be synchronised for.
+- `granularity` (String) Field usage depends on `service` value:
+ - Service `snapchat_ads`: [Sets Granularity on custom report](https://fivetran.com/docs/connectors/applications/snapchat-ads/custom-reports#granularity).
+- `level` (String) Field usage depends on `service` value:
+ - Service `reddit_ads`: Level of custom report.
+- `metrics` (Set of String) Field usage depends on `service` value:
+ - Service `tiktok_ads`: Metrics to be synced
+- `report_fields` (Set of String) Field usage depends on `service` value:
+ - Service `reddit_ads`: The list of fields included in custom report.
+- `report_name` (String) Field usage depends on `service` value:
+ - Service `reddit_ads`: The table name within the schema to which connector syncs the data of the specific report.
+ - Service `snapchat_ads`: Custom report name (must be unique)
+- `report_type` (String) Field usage depends on `service` value:
+ - Service `tiktok_ads`: Type of report to be generated
+- `segmentation` (String) Field usage depends on `service` value:
+ - Service `reddit_ads`: Level of custom report.
+- `sk_ad_metrics_fields` (Set of String) Field usage depends on `service` value:
+ - Service `snapchat_ads`: [List of SKAd Metrics fields in custom report](https://fivetran.com/docs/connectors/applications/snapchat-ads/custom-reports#skadmetricsfields).
+- `table_name` (String) Field usage depends on `service` value:
+ - Service `tiktok_ads`: Destination Table name of report
+- `time_zone` (String) Field usage depends on `service` value:
+ - Service `reddit_ads`: The specific time zone to sync report data if `useAccountTimeZone` is set to `false`.
+- `time_zone_mode` (String) Field usage depends on `service` value:
+ - Service `reddit_ads`: When this parameter is set to `ACCOUNT`, connector will use account related time zone to sync report data. Default value: `ACCOUNT`. Possible values: `ACCOUNT`, `USER`
+
+
+
+### Nested Schema for `config.custom_tables`
+
+Optional:
+
+- `action_breakdowns` (Set of String) Field usage depends on `service` value:
+ - Service `facebook_ads`: List of action_breakdowns which connector will sync. [Possible action_breakdowns values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#actionbreakdowns).
+- `action_report_time` (String) Field usage depends on `service` value:
+ - Service `facebook_ads`: The report time of action stats. [Possible action_report time values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#actionreporttime).
+- `aggregation` (String) Field usage depends on `service` value:
+ - Service `facebook_ads`: Options to select aggregation duration. [Possible aggregation values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#aggregation).
+- `breakdowns` (Set of String) Field usage depends on `service` value:
+ - Service `facebook_ads`: List of breakdowns which connector will sync.
[Possible breakdowns values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#breakdowns). +- `click_attribution_window` (String) Field usage depends on `service` value: + - Service `facebook_ads`: Time period to attribute conversions based on clicks. [Possible click_attribution_window values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#clickattributionwindow). +- `config_type` (String) Field usage depends on `service` value: + - Service `facebook_ads`: Option to select Prebuilt Reports or Custom Reports. [Possible config_type values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#configtype). +- `engaged_view_attribution_window` (String) Field usage depends on `service` value: + - Service `facebook_ads`: Time period to attribute conversions based on engaged views. [Possible view_attribution_window values](https://fivetran.com/docs/connectors/applications/facebook-ads#engagedviewattributionwindow). +- `fields` (Set of String) Field usage depends on `service` value: + - Service `facebook_ads`: List of fields which connector will sync. [Possible field values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#fields). +- `level` (String) +- `prebuilt_report_name` (String) Field usage depends on `service` value: + - Service `facebook_ads`: The report name to which connector will sync the data. [Possible prebuilt_report values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#prebuiltreport). +- `table_name` (String) Field usage depends on `service` value: + - Service `facebook_ads`: The table name within the schema to which the connector will sync the data. It must be unique within the connector and must comply with [Fivetran's naming conventions](https://fivetran.com/docs/getting-started/core-concepts#namingconventions). +- `use_unified_attribution_setting` (Boolean) +- `view_attribution_window` (String) Field usage depends on `service` value: + - Service `facebook_ads`: Time period to attribute conversions based on views. [Possible view_attribution_window values](https://fivetran.com/docs/connectors/applications/facebook-ads-insights/api-config#viewattributionwindow). + + + +### Nested Schema for `config.project_credentials` + +Optional: + +- `api_key` (String, Sensitive) Field usage depends on `service` value: + - Service `amplitude`: The API key of the project. +- `project` (String) Field usage depends on `service` value: + - Service `amplitude`: The project name you wish to use with Fivetran. +- `secret_key` (String, Sensitive) Field usage depends on `service` value: + - Service `amplitude`: The secret key of the project. + + + +### Nested Schema for `config.report_configs` + +Optional: + +- `config_type` (String) Field usage depends on `service` value: + - Service `yahoo_dsp`: Set the value to `PREBUILT` if it's one of the preconfigured reports (see the `prebuilt_report_type` option). Otherwise, set to `CUSTOM`. +- `currency` (String) Field usage depends on `service` value: + - Service `yahoo_dsp`: [Currency](https://developer.yahooinc.com/dsp/api/docs/reporting/payloadspec.html) used in a report. Default value: `USD`. +- `dimensions` (Set of String) Field usage depends on `service` value: + - Service `yahoo_dsp`: [Dimensions](https://developer.yahooinc.com/dsp/api/docs/reporting/dimensions.html) used in a report. Must be populated if `config_type` is set to `CUSTOM`. 
+- `interval_type` (String) Field usage depends on `service` value: + - Service `yahoo_dsp`: The [granularity](https://developer.yahooinc.com/dsp/api/docs/reporting/range-examples.html#interval-type-id) of data in a report. Default value: `DAY`. +- `metrics` (Set of String) Field usage depends on `service` value: + - Service `yahoo_dsp`: [Metrics](https://developer.yahooinc.com/dsp/api/docs/reporting/metrics.html) used in a report. Must be populated if `config_type` is set to `CUSTOM`. +- `prebuilt_report_type` (String) Field usage depends on `service` value: + - Service `yahoo_dsp`: Specific report type to sync. Must be populated if `config_type` is set to `PREBUILT`. +- `report_name` (String) Field usage depends on `service` value: + - Service `yahoo_dsp`: Table name in destination. +- `time_zone` (String) Field usage depends on `service` value: + - Service `yahoo_dsp`: Specify the time zone to be used to request report data +- `use_advertiser_timezone` (Boolean) Field usage depends on `service` value: + - Service `yahoo_dsp`: Use advertiser timezone to request report data. + + + +### Nested Schema for `config.report_list` + +Optional: + +- `dimension` (String) Field usage depends on `service` value: + - Service `spotify_ads`: The dimension (entity_type) to sync. +- `fields` (Set of String) Field usage depends on `service` value: + - Service `spotify_ads`: A list of the fields (metrics) to sync. +- `granularity` (String) Field usage depends on `service` value: + - Service `spotify_ads`: The report granularity. +- `table` (String) Field usage depends on `service` value: + - Service `spotify_ads`: The table name within the schema to which connector will sync the data of the specific report. + + + +### Nested Schema for `config.reports` + +Optional: + +- `aggregation` (String) Field usage depends on `service` value: + - Service `google_search_console`: (Optional) Aggregation type. Supported only for the `SEARCH_RESULTS` report type +- `attributes` (Set of String) Field usage depends on `service` value: + - Service `google_search_ads_360`: The report attributes included to sync. +- `config_type` (String) Field usage depends on `service` value: + - Service `google_analytics`: Whether to use the [Prebuilt Reports or Custom Reports](https://fivetran.com/docs/connectors/applications/google-analytics#schemainformation). + - Service `google_analytics_4`: Whether to use the Prebuilt Reports or Custom Reports. +- `dimensions` (Set of String) Field usage depends on `service` value: + - Service `google_analytics`: The report dimensions to include into a sync. The `date` dimension is mandatory for all the report types. + - Service `google_analytics_4`: The report dimensions to include into a sync. + - Service `google_search_console`: The report dimensions included to sync. +- `fields` (Set of String) Field usage depends on `service` value: + - Service `google_ads`: A list of the fields to sync. Must be populated if `config_type` is set to `Custom`. +- `filter` (String) Field usage depends on `service` value: + - Service `google_analytics`: String parameter restricts the data returned for your report. To use the filter parameter, specify a dimension or metric on which to filter, followed by the filter expression +- `filter_field_name` (String) Field usage depends on `service` value: + - Service `google_analytics_4`: The dimension name to filter on. +- `filter_type` (String) Field usage depends on `service` value: + - Service `google_analytics_4`: Filter type for reports request. 
Possible values are INCLUDE and EXCLUDE +- `filter_value` (String) +- `metrics` (Set of String) Field usage depends on `service` value: + - Service `google_analytics`: The report metrics to include into a sync. + - Service `google_analytics_4`: The report metrics to include into a sync. + - Service `google_search_ads_360`: The report metrics included to sync. +- `prebuilt_report` (String) Field usage depends on `service` value: + - Service `google_analytics`: The name of the Prebuilt Report from which the connector will sync the data. + - Service `google_analytics_4`: The name of the Prebuilt Report from which the connector will sync the data. +- `report_type` (String) Field usage depends on `service` value: + - Service `google_ads`: The name of the Google Ads report from which the connector will sync the data. [Possible report_type values](https://developers.google.com/adwords/api/docs/appendix/reports#report-types). + - Service `google_search_ads_360`: The type of report + - Service `google_search_console`: The type of report +- `rollback_window` (Number) Field usage depends on `service` value: + - Service `google_analytics_4`: The custom window size for rollback syncs. +- `search_types` (Set of String) Field usage depends on `service` value: + - Service `google_search_console`: Search types included to sync. Supported only for the `SEARCH_RESULTS` report type +- `segment_ids` (Set of String) +- `segments` (Set of String) Field usage depends on `service` value: + - Service `google_analytics`: A segment is a subset of your Analytics data that is made up of one or more non-destructive filters (filters that do not alter the underlying data). Those filters isolate subsets of users, sessions, and hits. + - Service `google_search_ads_360`: The report segments included to sync. +- `table` (String) Field usage depends on `service` value: + - Service `google_ads`: The table name within the schema to which connector will sync the data of the specific report. + - Service `google_analytics`: The table name within the schema to which connector will sync the data of the specific report. + - Service `google_analytics_4`: The table name within the schema to which connector will sync the data of the specific report. + - Service `google_search_ads_360`: The name of a table within the schema to which connector syncs the data of a given report. + - Service `google_search_console`: The name of a table within the schema to which connector syncs the data of a given report. +- `time_aggregation_granularity` (String) Field usage depends on `service` value: + - Service `google_analytics_4`: The report data aggregation time granularity. + + + +### Nested Schema for `config.secrets_list` + +Optional: + +- `key` (String) Field usage depends on `service` value: + - Service `aws_lambda`: Secret Key. + - Service `azure_function`: Key + - Service `google_cloud_function`: Key +- `value` (String, Sensitive) Field usage depends on `service` value: + - Service `aws_lambda`: Secret Value. + - Service `azure_function`: Value + - Service `google_cloud_function`: Value + + + +### Nested Schema for `config.tenant_configs` + +Optional: + +- `subdomain` (String) Field usage depends on `service` value: + - Service `reltio`: Your Reltio subdomain. +- `tenant_id` (String) Field usage depends on `service` value: + - Service `reltio`: Your Reltio tenant ID. + + + + +### Nested Schema for `destination_schema` + +Optional: + +- `name` (String) The connector schema name in destination. Has to be unique within the group (destination). 
Required for connector creation.
+- `prefix` (String) The connector schema prefix has to be unique within the group (destination). Each replicated schema is prefixed with the provided value. Required for connector creation.
+- `table` (String) The table name unique within the schema to which connector will sync the data. Required for connector creation.
+
+
+
+### Nested Schema for `timeouts`
+
+Optional:
+
+- `create` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
+- `update` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
+
+## Import
+
+1. To import an existing `fivetran_connector` resource into your Terraform state, you need to get the **Fivetran Connector ID** from the **Setup** tab of the connector page in your Fivetran dashboard.
+
+2. Retrieve all connectors in a particular group using the [fivetran_group_connectors data source](/docs/data-sources/group_connectors). To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups).
+
+3. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_connector" "my_imported_connector" {
+
+}
+```
+
+4. Run the `terraform import` command:
+
+```
+terraform import fivetran_connector.my_imported_connector {your Fivetran Connector ID}
+```
+
+5. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_connector.my_imported_connector'
+```
+
+6. Copy the values and paste them into your `.tf` configuration.
+
+-> The `config` object in the state contains all properties defined in the schema. You need to remove the properties from `config` that are not related to your connector. See the [Fivetran REST API documentation](https://fivetran.com/docs/rest-api/connectors/config) for reference to find the properties you need to keep in the `config` section.
+
+## How to authorize a connector
+
+### GitHub connector example
+
+To authorize a GitHub connector via Terraform using a personal access token, specify `auth_mode`, `username`, and `pat` inside the `config` block instead of `auth`, and set `run_setup_tests` to `true`:
+
+```hcl
+resource "fivetran_connector" "my_github_connector" {
+    group_id = "group_id"
+    service = "github"
+    run_setup_tests = "true"
+
+    destination_schema {
+        name = "github_connector"
+    }
+
+    config {
+        sync_mode = "AllRepositories"
+        use_webhooks = "false"
+        auth_mode = "PersonalAccessToken"
+        username = "git-hub-user-name"
+        pat = "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
+    }
+}
+```
\ No newline at end of file
diff --git a/docs/resources/connector_certificates.md b/docs/resources/connector_certificates.md
new file mode 100644
index 00000000..87bf8da2
--- /dev/null
+++ b/docs/resources/connector_certificates.md
@@ -0,0 +1,46 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "fivetran_connector_certificates Resource - terraform-provider-fivetran"
+subcategory: ""
+description: |-
+
+---
+
+# fivetran_connector_certificates (Resource)
+
+
+
+
+
+## Schema
+
+### Required
+
+- `connector_id` (String) The unique identifier for the target connection within the Fivetran system.
+
+### Optional
+
+- `certificate` (Block Set) (see [below for nested schema](#nestedblock--certificate))
+
+### Read-Only
+
+- `id` (String) The unique identifier for the resource. Equal to the target connection id.
+
+
+### Nested Schema for `certificate`
+
+Required:
+
+- `encoded_cert` (String, Sensitive) Base64-encoded certificate.
+- `hash` (String) Hash of the certificate.
+
+Read-Only:
+
+- `name` (String) Certificate name.
+- `public_key` (String) The SSH public key.
+- `sha1` (String) Certificate sha1.
+- `sha256` (String) Certificate sha256.
+- `type` (String) Type of the certificate.
+- `validated_by` (String) Name of the user who validated the certificate.
+- `validated_date` (String) The date when the certificate was approved.
diff --git a/docs/resources/connector_fingerprints.md b/docs/resources/connector_fingerprints.md
new file mode 100644
index 00000000..64d33b6d
--- /dev/null
+++ b/docs/resources/connector_fingerprints.md
@@ -0,0 +1,79 @@
+---
+page_title: "Resource: fivetran_connector_fingerprints"
+---
+
+# Resource: fivetran_connector_fingerprints
+
+This resource allows you to manage the list of approved SSH fingerprints for a particular connector.
+
+## Example Usage
+
+```hcl
+resource "fivetran_connector_fingerprints" "my_connector_approved_fingerprints" {
+    connector_id = fivetran_connector.my_connector.id
+    fingerprint {
+        hash = "jhgfJfgrI6yy..."
+        public_key = "ssh-rsa CCCCB3NzaC1yc2ECCASFWFWDFRWT5WAS ... fivetran user key"
+    }
+    fingerprint {
+        hash = "eUtPirI6yytWe..."
+        public_key = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC6 ... fivetran user key"
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `connector_id` (String) The unique identifier for the target connection within the Fivetran system.
+
+### Optional
+
+- `fingerprint` (Block Set) (see [below for nested schema](#nestedblock--fingerprint))
+
+### Read-Only
+
+- `id` (String) The unique identifier for the resource. Equal to the target connection id.
+
+
+### Nested Schema for `fingerprint`
+
+Required:
+
+- `hash` (String) Hash of the fingerprint.
+- `public_key` (String) The SSH public key.
+
+Read-Only:
+
+- `validated_by` (String) Name of the user who validated the fingerprint.
+- `validated_date` (String) The date when the fingerprint was approved.
+
+## Import
+
+1. To import an existing `fivetran_connector_fingerprints` resource into your Terraform state, you need to get the **Fivetran Connector ID** from the **Setup** tab of the connector page in your Fivetran dashboard.
+
+2. Retrieve all connectors in a particular group using the [fivetran_group_connectors data source](/docs/data-sources/group_connectors). To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups).
+
+3. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_connector_fingerprints" "my_imported_connector_fingerprints" {
+
+}
+```
+
+4. Run the `terraform import` command:
+
+```
+terraform import fivetran_connector_fingerprints.my_imported_connector_fingerprints {your Fivetran Connector ID}
+```
+
+5. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_connector_fingerprints.my_imported_connector_fingerprints'
+```
+
+6. Copy the values and paste them into your `.tf` configuration.
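+
+On Terraform 1.5 and later, you can alternatively declare the import in configuration with an `import` block instead of running the CLI command from step 4. This is a sketch; the connector ID below is a placeholder:
+
+```hcl
+import {
+  # Adopt the existing connector's approved fingerprints into this resource
+  to = fivetran_connector_fingerprints.my_imported_connector_fingerprints
+  id = "your_fivetran_connector_id"
+}
+```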
\ No newline at end of file
diff --git a/docs/resources/connector_schedule.md b/docs/resources/connector_schedule.md
new file mode 100644
index 00000000..b467da70
--- /dev/null
+++ b/docs/resources/connector_schedule.md
@@ -0,0 +1,73 @@
+---
+page_title: "Resource: fivetran_connector_schedule"
+---
+
+# Resource: fivetran_connector_schedule
+
+This resource allows you to manage the connector schedule: pause/unpause the connector and set `daily_sync_time` and `sync_frequency`.
+
+## Example Usage
+
+```hcl
+resource "fivetran_connector_schedule" "my_connector_schedule" {
+    connector_id = fivetran_connector.my_connector.id
+
+    sync_frequency = "1440"
+    daily_sync_time = "03:00"
+
+    paused = false
+    pause_after_trial = true
+
+    schedule_type = "auto"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `connector_id` (String) The unique identifier for the connector within the Fivetran system.
+
+### Optional
+
+- `daily_sync_time` (String) The optional parameter that defines the sync start time when the sync frequency is already set or being set by the current request to 1440. It can be specified in one hour increments starting from 00:00 to 23:00. If not specified, we will use [the baseline sync start time](https://fivetran.com/docs/getting-started/syncoverview#syncfrequencyandscheduling). This parameter has no effect on the [0 to 60 minutes offset](https://fivetran.com/docs/getting-started/syncoverview#syncstarttimesandoffsets) used to determine the actual sync start time.
+- `pause_after_trial` (String) Specifies whether the connector should be paused after the free trial period has ended.
+- `paused` (String) Specifies whether the connector is paused.
+- `schedule_type` (String) The connector schedule configuration type. Supported values: auto, manual.
+- `sync_frequency` (String) The connector sync frequency in minutes. Supported values: 1, 5, 15, 30, 60, 120, 180, 360, 480, 720, 1440.
+
+### Read-Only
+
+- `id` (String) The unique resource identifier (equals to `connector_id`).
+
+## Import
+
+You don't need to import this resource as it is synthetic.
+
+To fetch schedule values from an existing connector, use the `fivetran_connector` data source:
+```hcl
+data "fivetran_connector" "my_connector" {
+    id = "my_connector_id"
+}
+
+# now you can use schedule values from this data source:
+# sync_frequency = data.fivetran_connector.my_connector.sync_frequency
+# paused = data.fivetran_connector.my_connector.paused
+```
+
+This resource manages settings for an already existing connector instance and doesn't create a new one.
+If you already have an existing connector with id = `my_connector_id`, just define the `fivetran_connector_schedule` resource:
+
+```hcl
+resource "fivetran_connector_schedule" "my_connector_schedule" {
+    connector_id = "my_connector_id"
+
+    sync_frequency = "360"
+    paused = false
+    pause_after_trial = true
+    schedule_type = "auto"
+}
+```
+
+-> NOTE: You can't have several resources managing the same `connector_id`. They will be in conflict after each `apply`.
\ No newline at end of file
diff --git a/docs/resources/connector_schema_config.md b/docs/resources/connector_schema_config.md
new file mode 100644
index 00000000..e465ed2c
--- /dev/null
+++ b/docs/resources/connector_schema_config.md
@@ -0,0 +1,280 @@
+---
+page_title: "Resource: fivetran_connector_schema_config"
+---
+
+# Resource: fivetran_connector_schema_config
+
+This resource allows you to manage the Standard Configuration settings of a connector:
+ - Define the schema change handling settings
+ - Enable and disable schemas, tables, and columns
+
+The resource is in **ALPHA** state. The resource schema and behavior are subject to change without prior notice.
+
+Known issues:
+ - Defining `sync_mode` for a table may cause infinitely drifting changes in the plan.
+ - Using the `schema` field causes very slow plan preparation because of the poor performance of SetTypable fields in terraform-framework; use the MapTypable `schemas` field instead.
+
+## Usage guide
+
+Note that all configuration settings are aligned to the `schema_change_handling` settings, except the settings explicitly specified in `schemas`.
+In `schemas`, you only override the default settings defined by the chosen `schema_change_handling` option.
+The allowed `schema_change_handling` options are as follows:
+- `ALLOW_ALL` - all schemas, tables and columns are ENABLED by default. You only need to explicitly specify DISABLED items or hashed columns
+- `BLOCK_ALL` - all schemas, tables and columns are DISABLED by default, the configuration only specifies ENABLED items
+- `ALLOW_COLUMNS` - all schemas and tables are DISABLED by default, but all columns are ENABLED by default, the configuration specifies ENABLED schemas and tables, and DISABLED columns
+
+Note that system-enabled tables and columns (such as primary and foreign key columns, and [system tables and columns](https://fivetran.com/docs/getting-started/system-columns-and-tables)) are synced regardless of the `schema_change_handling` settings and configuration. You can only [disable non-locked columns in the system-enabled tables](#nestedblock--nonlocked). If the configuration specifies any system tables or locked system table columns as disabled (`enabled = "false"`), the provider just ignores these statements.
+
+## Usage examples
+
+### Example for the ALLOW_ALL option
+
+In `schemas`, you only need to specify schemas and tables you want to disable (`enabled = "false"`) and columns you want to disable or hash (`hashed = "true"`).
+
+```hcl
+resource "fivetran_connector_schema_config" "schema" {
+    connector_id           = "connector_id"
+    schema_change_handling = "ALLOW_ALL"
+    schemas = {
+        "schema_name" = {
+            tables = {
+                "table_name" = {
+                    columns = {
+                        "hashed_column_name" = {
+                            hashed = true
+                        }
+                        "blocked_column_name" = {
+                            enabled = false
+                        }
+                    }
+                }
+                "blocked_table_name" = {
+                    enabled = false
+                }
+            }
+        }
+        "blocked_schema" = {
+            enabled = false
+        }
+    }
+}
+```
+
+The configuration resulting from the example request is as follows:
+- All new and existing schemas except `blocked_schema` are enabled
+- All new and existing tables in the `schema_name` schema except the `blocked_table_name` table are enabled
+- All new and existing columns in the `table_name` table of the `schema_name` schema except the `blocked_column_name` column are enabled
+- The `hashed_column_name` column is hashed in the `table_name` table in the `schema_name` schema
+- All new schemas, tables, and columns are enabled once captured by the connector during the sync except those disabled by the system
+
+### Example for the BLOCK_ALL option
+
+```hcl
+resource "fivetran_connector_schema_config" "schema" {
+    connector_id           = "connector_id"
+    schema_change_handling = "BLOCK_ALL"
+    schemas = {
+        "schema_name" = {
+            tables = {
+                "table_name" = {
+                    columns = {
+                        "hashed_column_name" = {
+                            hashed = true
+                        }
+                    }
+                }
+                "enabled_table_name" = {
+                    enabled = true
+                }
+            }
+        }
+        "enabled_schema" = {
+            enabled = true
+        }
+    }
+}
+```
+
+The configuration resulting from the example request is as follows:
+
+- All new and existing schemas except the `enabled_schema` and `schema_name` are disabled
+- Only system-enabled tables and columns are enabled in the `enabled_schema` schema
+- All new and existing tables in the `schema_name` schema except the `enabled_table_name`, `table_name` tables and system tables are disabled
+- All new and existing columns in the `table_name` table of the `schema_name` schema are disabled except the `hashed_column_name` column and system columns
+- The `hashed_column_name` column in the `table_name` table in the `schema_name` schema is hashed
+- All new schemas, tables, and columns, except the system-enabled ones, are disabled once captured by the connector during the sync
+
+### Example for the ALLOW_COLUMNS option
+
+In `schemas`, you only need to specify schemas and tables you want to enable (`enabled = "true"`) and columns you want to disable (`enabled = "false"`) or hash (`hashed = "true"`).
+
+```hcl
+resource "fivetran_connector_schema_config" "schema" {
+    connector_id           = "connector_id"
+    schema_change_handling = "ALLOW_COLUMNS"
+    schemas = {
+        "schema_name" = {
+            tables = {
+                "table_name" = {
+                    columns = {
+                        "hashed_column_name" = {
+                            hashed = true
+                        }
+                        "disabled_column_name" = {
+                            enabled = false
+                        }
+                    }
+                }
+                "enabled_table" = {
+                    enabled = true
+                }
+            }
+        }
+        "enabled_schema_name" = {
+            enabled = true
+        }
+    }
+}
+```
+
+The configuration resulting from the example request is as follows:
+
+- All specified existing schemas and tables are enabled and all columns inside them are enabled by default, unless `enabled = "false"` is specified for the column
+- All new and existing schemas except the `enabled_schema_name` and `schema_name` are disabled
+- Only system-enabled tables and columns would be enabled in the `enabled_schema_name` schema
+- All new and existing tables in the `schema_name` schema except the `enabled_table`, `table_name` and system-enabled tables are disabled
+- All new and existing columns in the `table_name` table of the `schema_name` schema except the `disabled_column_name` column and system-enabled columns are enabled
+- The `hashed_column_name` column would be hashed in the `table_name` table in the `schema_name` schema
+- All new non-system-enabled schemas and tables would be disabled once captured by the connector during sync
+- All new non-system-enabled columns inside enabled tables (including system-enabled tables) would be enabled once captured by the connector during sync
+
+
+### Non-locked table column management in system-enabled tables
+
+You cannot manage system-enabled tables, but you can manage their non-locked columns. For example, your schema `schema_name` has a system-enabled table `system_enabled_table` that can't be disabled, and you want to disable one of its columns named `column_name`:
+
+```hcl
+resource "fivetran_connector_schema_config" "schema" {
+    connector_id           = "connector_id"
+    schema_change_handling = "ALLOW_COLUMNS"
+    schemas = {
+        "schema_name" = {
+            tables = {
+                "system_enabled_table" = {
+                    columns = {
+                        "column_name" = {
+                            enabled = false
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `connector_id` (String) The unique identifier for the connector within the Fivetran system.
+
+### Optional
+
+- `schema` (Block Set, Deprecated) (see [below for nested schema](#nestedblock--schema))
+- `schema_change_handling` (String) The value specifying how new source data is handled.
+- `schemas` (Attributes Map) Map of schema configurations. (see [below for nested schema](#nestedatt--schemas))
+- `schemas_json` (String) Schema settings in JSON format, following the Fivetran API endpoint contract for the `schemas` field (a map of schemas).
+- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts))
+- `validation_level` (String) The value defines the validation method.
+- NONE: no validation, any configuration accepted.
+- TABLES: validate table names, fail on attempt to configure non-existing schemas/tables.
+- COLUMNS: validate the whole schema config including column names. The resource will try to fetch columns for every configured table and verify column names.
+
+### Read-Only
+
+- `id` (String) The unique resource identifier (equal to `connector_id`).
+
+
+### Nested Schema for `schema`
+
+Required:
+
+- `name` (String) The schema name within your destination in accordance with Fivetran conventional rules.
+
+Optional:
+
+- `enabled` (Boolean) The boolean value specifying whether the sync for the schema into the destination is enabled.
+- `table` (Block Set) (see [below for nested schema](#nestedblock--schema--table))
+
+
+### Nested Schema for `schema.table`
+
+Required:
+
+- `name` (String) The table name within your destination in accordance with Fivetran conventional rules.
+
+Optional:
+
+- `column` (Block Set) (see [below for nested schema](#nestedblock--schema--table--column))
+- `enabled` (Boolean) The boolean value specifying whether the sync of table into the destination is enabled.
+- `sync_mode` (String) This field appears in the response if the connector supports switching sync modes for tables.
+
+
+### Nested Schema for `schema.table.column`
+
+Required:
+
+- `name` (String) The column name within your destination in accordance with Fivetran conventional rules.
+
+Optional:
+
+- `enabled` (Boolean) The boolean value specifying whether the sync of the column into the destination is enabled.
+- `hashed` (Boolean) The boolean value specifying whether a column should be hashed.
+- `is_primary_key` (Boolean)
+
+
+
+
+
+### Nested Schema for `schemas`
+
+Optional:
+
+- `enabled` (Boolean) The boolean value specifying whether the sync for the schema into the destination is enabled.
+- `tables` (Attributes Map) Map of table configurations. (see [below for nested schema](#nestedatt--schemas--tables))
+
+
+### Nested Schema for `schemas.tables`
+
+Optional:
+
+- `columns` (Attributes Map) Map of column configurations. (see [below for nested schema](#nestedatt--schemas--tables--columns))
+- `enabled` (Boolean) The boolean value specifying whether the sync for the table into the destination is enabled.
+- `sync_mode` (String) This field appears in the response if the connector supports switching sync modes for tables.
+
+
+### Nested Schema for `schemas.tables.columns`
+
+Optional:
+
+- `enabled` (Boolean) The boolean value specifying whether the sync of the column into the destination is enabled.
+- `hashed` (Boolean) The boolean value specifying whether a column should be hashed.
+- `is_primary_key` (Boolean)
+
+
+
+
+
+### Nested Schema for `timeouts`
+
+Optional:
+
+- `create` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
+- `read` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). Read operations occur during any refresh or planning operation when refresh is enabled.
+- `update` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
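+
+As an alternative for large configurations, the `schemas_json` field listed above accepts the same configuration serialized as JSON. A minimal sketch, assuming the JSON follows the Fivetran API `schemas` contract used in the examples above (the schema and table names are hypothetical):
+
+```hcl
+resource "fivetran_connector_schema_config" "schema" {
+    connector_id           = "connector_id"
+    schema_change_handling = "ALLOW_COLUMNS"
+    schemas_json = jsonencode({
+        "schema_name" = {
+            "enabled" = true
+            "tables" = {
+                "table_name" = {
+                    "enabled" = true
+                }
+            }
+        }
+    })
+}
+```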
+
+## Import
+
+You don't need to import this resource as it is synthetic (it doesn't create new instances upstream).
\ No newline at end of file
diff --git a/docs/resources/dbt_git_project_config.md b/docs/resources/dbt_git_project_config.md
new file mode 100644
index 00000000..81f29780
--- /dev/null
+++ b/docs/resources/dbt_git_project_config.md
@@ -0,0 +1,64 @@
+---
+page_title: "Resource: fivetran_dbt_git_project_config"
+---
+
+# Resource: fivetran_dbt_git_project_config
+
+Resource is in ALPHA state.
+
+This resource allows you to add and manage dbt Git Project configs.
+
+## Example Usage
+
+```hcl
+resource "fivetran_dbt_git_project_config" "git_project_config" {
+    project_id     = "project_id"
+    git_remote_url = "your_git_remote_url"
+    git_branch     = "main"
+    folder_path    = "/dbt/project/folder/path"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `project_id` (String) The unique identifier for the dbt Project within the Fivetran system.
+
+### Optional
+
+- `ensure_readiness` (Boolean) Specifies whether the resource should wait for the project to finish initialization. Default value: false.
+- `folder_path` (String) Folder in Git repo with your dbt project.
+- `git_branch` (String) Git branch.
+- `git_remote_url` (String) Git remote URL with your dbt project.
+
+### Read-Only
+
+- `id` (String) The unique identifier for the dbt Project within the Fivetran system.
+
+## Import
+
+1. To import an existing `fivetran_dbt_git_project_config` resource into your Terraform state, you need to get **Dbt Project ID** via API call `GET https://api.fivetran.com/v1/dbt/projects` to retrieve available projects.
+2. Fetch project details for a particular `project-id` using `GET https://api.fivetran.com/v1/dbt/projects/{project-id}` to ensure that this is the project you want to import.
+3. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_dbt_git_project_config" "my_imported_fivetran_dbt_git_project_config" {
+
+}
+```
+
+4. Run the `terraform import` command:
+
+```
+terraform import fivetran_dbt_git_project_config.my_imported_fivetran_dbt_git_project_config {Dbt Project ID}
+```
+
+5. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_dbt_git_project_config.my_imported_fivetran_dbt_git_project_config'
+```
+
+6. Copy the values and paste them to your `.tf` configuration.
\ No newline at end of file
diff --git a/docs/resources/dbt_project.md b/docs/resources/dbt_project.md
new file mode 100644
index 00000000..f6eccf5b
--- /dev/null
+++ b/docs/resources/dbt_project.md
@@ -0,0 +1,104 @@
+---
+page_title: "Resource: fivetran_dbt_project"
+---
+
+# Resource: fivetran_dbt_project
+
+Resource is in ALPHA state.
+
+This resource allows you to add, manage, and delete dbt Projects in your account.
+
+## Example Usage
+
+```hcl
+resource "fivetran_dbt_project" "project" {
+    group_id         = "group_id"
+    dbt_version      = "1.4.1"
+    default_schema   = "default_schema"
+    target_name      = "target_name"
+    environment_vars = ["environment_var=value"]
+    threads          = 8
+    type             = "GIT"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `dbt_version` (String) The version of dbt that should run the project. We support the following versions: 0.18.0 - 0.18.2, 0.19.0 - 0.19.2, 0.20.0 - 0.20.2, 0.21.0 - 0.21.1, 1.0.0, 1.0.1, 1.0.3 - 1.0.9, 1.1.0 - 1.1.3, 1.2.0 - 1.2.4, 1.3.0 - 1.3.2, 1.4.1.
+- `default_schema` (String) Default schema in destination. This production schema will contain your transformed data.
+- `group_id` (String) The unique identifier for the group within the Fivetran system.
+
+### Optional
+
+- `ensure_readiness` (Boolean, Deprecated) Specifies whether the resource should wait for the project to finish initialization. Default value: true.
+- `environment_vars` (Set of String) List of environment variables defined as key-value pairs in the raw string format using = as a separator. The variable name should have the DBT_ prefix and can contain A-Z, 0-9, dash, underscore, or dot characters. Example: "DBT_VARIABLE=variable_value"
Example: "DBT_VARIABLE=variable_value" +- `project_config` (Block, Optional, Deprecated) (see [below for nested schema](#nestedblock--project_config)) +- `target_name` (String) Target name to set or override the value from the deployment.yaml +- `threads` (Number) The number of threads dbt will use (from 1 to 32). Make sure this value is compatible with your destination type. For example, Snowflake supports only 8 concurrent queries on an X-Small warehouse. +- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts)) +- `type` (String) Type of dbt Project. Currently only `GIT` supported. Empty value will be considered as default (GIT). + +### Read-Only + +- `created_at` (String) The timestamp of the dbt Project creation. +- `created_by_id` (String) The unique identifier for the User within the Fivetran system who created the dbt Project. +- `id` (String) The unique identifier for the dbt Project within the Fivetran system. +- `models` (Attributes Set) (see [below for nested schema](#nestedatt--models)) +- `public_key` (String) Public key to grant Fivetran SSH access to git repository. +- `status` (String) Status of dbt Project (NOT_READY, READY, ERROR). + + +### Nested Schema for `project_config` + +Optional: + +- `folder_path` (String) Folder in Git repo with your dbt project +- `git_branch` (String) Git branch +- `git_remote_url` (String) Git remote URL with your dbt project + + + +### Nested Schema for `timeouts` + +Optional: + +- `create` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours). + + + +### Nested Schema for `models` + +Read-Only: + +- `id` (String) The unique identifier for the dbt Model within the Fivetran system. +- `model_name` (String) The dbt Model name. +- `scheduled` (Boolean) Boolean specifying whether the model is selected for execution in the dashboard. + +## Import + +1. To import an existing `fivetran_dbt_project` resource into your Terraform state, you need to get **Dbt Project ID** via API call `GET https://api.fivetran.com/v1/dbt/projects` to retrieve available projects. +2. Fetch project details for particular `project-id` using `GET https://api.fivetran.com/v1/dbt/projects/{project-id}` to ensure that this is the project you want to import. +3. Define an empty resource in your `.tf` configuration: + +```hcl +resource "fivetran_dbt_project" "my_imported_fivetran_dbt_project" { + +} +``` + +4. Run the `terraform import` command: + +``` +terraform import fivetran_dbt_project.my_imported_fivetran_dbt_project {Dbt Project ID} +``` + +4. Use the `terraform state show` command to get the values from the state: + +``` +terraform state show 'fivetran_dbt_project.my_imported_fivetran_dbt_project' +``` + +5. Copy the values and paste them to your `.tf` configuration. \ No newline at end of file diff --git a/docs/resources/dbt_transformation.md b/docs/resources/dbt_transformation.md new file mode 100644 index 00000000..98845ee1 --- /dev/null +++ b/docs/resources/dbt_transformation.md @@ -0,0 +1,93 @@ +--- +page_title: "Resource: fivetran_dbt_transformation" +--- + +# Resource: fivetran_dbt_transformation + +Resource is in ALPHA state. + +This resource allows you to add, manage and delete dbt Transformations for existing dbt Model. 
+To retrieve available dbt Models, use the [Retrieve dbt Project models](https://fivetran.com/docs/rest-api/dbt-transformation-management#retrievedbtprojectmodels) endpoint.
+
+## Example Usage
+
+```hcl
+resource "fivetran_dbt_transformation" "transformation" {
+    dbt_model_name = "dbt_model_name"
+    dbt_project_id = "dbt_project_id"
+    run_tests      = "false"
+    paused         = "false"
+    schedule {
+        schedule_type = "TIME_OF_DAY"
+        time_of_day   = "12:00"
+        days_of_week  = ["MONDAY", "SATURDAY"]
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `dbt_model_name` (String) Target dbt Model name.
+- `dbt_project_id` (String) The unique identifier for the dbt Project within the Fivetran system.
+
+### Optional
+
+- `paused` (Boolean) The field indicating whether the transformation will be set into the paused state. By default, the value is false.
+- `run_tests` (Boolean) The field indicating whether the tests have been configured for dbt Transformation. By default, the value is false.
+- `schedule` (Block, Optional) (see [below for nested schema](#nestedblock--schedule))
+- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts))
+
+### Read-Only
+
+- `connector_ids` (Set of String) Identifiers of related connectors.
+- `created_at` (String) The timestamp of the dbt Transformation creation.
+- `dbt_model_id` (String) The unique identifier for the dbt Model within the Fivetran system.
+- `id` (String) The unique identifier for the dbt Transformation within the Fivetran system.
+- `model_ids` (Set of String) Identifiers of related models.
+- `output_model_name` (String) The dbt Model name.
+
+
+### Nested Schema for `schedule`
+
+Optional:
+
+- `days_of_week` (Set of String) The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY.
+- `interval` (Number) The time interval in minutes between subsequent transformation runs.
+- `schedule_type` (String) The type of the schedule to run the dbt Transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL. For the INTEGRATED schedule type, the interval and time_of_day values are ignored and only the days_of_week parameter values are taken into account (but may be empty or null). For the TIME_OF_DAY schedule type, the interval parameter value is ignored and the time_of_day value is taken into account along with the days_of_week value. For the INTERVAL schedule type, the time_of_day value is ignored and the interval parameter value is taken into account along with the days_of_week value.
+- `time_of_day` (String) The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00"
+
+
+
+### Nested Schema for `timeouts`
+
+Optional:
+
+- `create` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
+
+## Import
+
+1. To import an existing `fivetran_dbt_transformation` resource into your Terraform state, you need to get **Transformation ID** on the transformation page in your Fivetran dashboard.
+2. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_dbt_transformation" "my_imported_fivetran_dbt_transformation" {
+
+}
+```
+
+3. Run the `terraform import` command:
+
+```
+terraform import fivetran_dbt_transformation.my_imported_fivetran_dbt_transformation {Transformation ID}
+```
+
+4. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_dbt_transformation.my_imported_fivetran_dbt_transformation'
+```
+
+5. Copy the values and paste them to your `.tf` configuration.
\ No newline at end of file
diff --git a/docs/resources/destination.md b/docs/resources/destination.md
new file mode 100644
index 00000000..9030064a
--- /dev/null
+++ b/docs/resources/destination.md
@@ -0,0 +1,486 @@
+---
+page_title: "Resource: fivetran_destination"
+---
+
+# Resource: fivetran_destination
+
+This resource allows you to create, update, and delete destinations.
+
+IMPORTANT: Groups and destinations are mapped 1:1 to each other. We do this mapping using the group's id value that we automatically generate when you create a group using our REST API, and the destination's group_id value that you specify when you create a destination using our REST API. This means that if you use our REST API to create a destination, you must create a group in your Fivetran account before you can create a destination in it.
+
+When you create a destination in your Fivetran dashboard, we automatically create a group, assign a value to its id, and create a destination with the same group_id value, which is unique in your Fivetran account. The group's name corresponds to the Destination name you specify in your Fivetran dashboard when creating the destination.
+
+## Example Usage
+
+```hcl
+resource "fivetran_destination" "dest" {
+    group_id                     = fivetran_group.group.id
+    service                      = "postgres_rds_warehouse"
+    time_zone_offset             = "0"
+    region                       = "GCP_US_EAST4"
+    trust_certificates           = "true"
+    trust_fingerprints           = "true"
+    daylight_saving_time_enabled = "true"
+    run_setup_tests              = "true"
+
+    config {
+        host            = "destination.fqdn"
+        port            = 5432
+        user            = "postgres"
+        password        = "myPass"
+        database        = "fivetran"
+        connection_type = "Directly"
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `group_id` (String) The unique identifier for the Group within the Fivetran system.
+- `region` (String) Data processing location. This is where Fivetran will operate and run computation on data.
+- `service` (String) The destination type id within the Fivetran system.
+- `time_zone_offset` (String) Determines the time zone for the Fivetran sync schedule.
+
+### Optional
+
+- `config` (Block, Optional) (see [below for nested schema](#nestedblock--config))
+- `daylight_saving_time_enabled` (Boolean) Shift my UTC offset with daylight savings time (US Only)
+- `hybrid_deployment_agent_id` (String) The hybrid deployment agent ID that refers to the controller created for the group the connection belongs to. If the value is specified, the system will try to associate the connection with an existing agent.
+- `local_processing_agent_id` (String, Deprecated) (Deprecated) The hybrid deployment agent ID that refers to the controller created for the group the connection belongs to. If the value is specified, the system will try to associate the connection with an existing agent.
+- `networking_method` (String) Possible values: Directly, SshTunnel, ProxyAgent.
+- `private_link_id` (String) The private link ID.
+- `run_setup_tests` (Boolean) Specifies whether the setup tests should be run automatically. The default value is TRUE. +- `timeouts` (Block, Optional) (see [below for nested schema](#nestedblock--timeouts)) +- `trust_certificates` (Boolean) Specifies whether we should trust the certificate automatically. The default value is FALSE. If a certificate is not trusted automatically, it has to be approved with [Certificates Management API Approve a destination certificate](https://fivetran.com/docs/rest-api/certificates#approveadestinationcertificate). +- `trust_fingerprints` (Boolean) Specifies whether we should trust the SSH fingerprint automatically. The default value is FALSE. If a fingerprint is not trusted automatically, it has to be approved with [Certificates Management API Approve a destination fingerprint](https://fivetran.com/docs/rest-api/certificates#approveadestinationfingerprint). + +### Read-Only + +- `id` (String) The unique identifier for the destination within the Fivetran system. +- `setup_status` (String) Destination setup status. + + +### Nested Schema for `config` + +Optional: + +- `always_encrypted` (Boolean) Field usage depends on `service` value: + - Service `aurora_postgres_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `aurora_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_postgres_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_sql_data_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_sql_database`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_sql_managed_db_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `maria_rds_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `maria_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `mysql_rds_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `mysql_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `panoply`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `periscope_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `postgres_gcp_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `postgres_rds_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `postgres_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `redshift`: Require TLS through Tunnel + - Service `sql_server_rds_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `sql_server_warehouse`: Specifies whether TLS is required. Must be populated if `connection_type` is set to `SshTunnel`. 
+- `auth` (String) Field usage depends on `service` value: + - Service `snowflake`: Password-based or key-based authentication type +- `auth_type` (String) Field usage depends on `service` value: + - Service `databricks`: Authentication type + - Service `redshift`: Authentication type. Default value: `PASSWORD`. +- `bootstrap_servers` (Set of String) Field usage depends on `service` value: + - Service `confluent_cloud_wh`: Comma-separated list of Confluent Cloud servers in the `server:port` format. +- `bucket` (String) Field usage depends on `service` value: + - Service `big_query`: Customer bucket. If specified, your GCS bucket will be used to process the data instead of a Fivetran-managed bucket. The bucket must be present in the same location as the dataset location. + - Service `big_query_dts`: Customer bucket. If specified, your GCS bucket will be used to process the data instead of a Fivetran-managed bucket. The bucket must be present in the same location as the dataset location. + - Service `managed_big_query`: Customer bucket. If specified, your GCS bucket will be used to process the data instead of a Fivetran-managed bucket. The bucket must be present in the same location as the dataset location. + - Service `new_s3_datalake`: The name of the bucket to be used as destination +- `catalog` (String) Field usage depends on `service` value: + - Service `databricks`: Catalog name +- `client_id` (String) Field usage depends on `service` value: + - Service `adls`: Client id of service principal + - Service `onelake`: Client ID of service principal +- `cloud_provider` (String) Field usage depends on `service` value: + - Service `databricks`: Databricks deployment cloud +- `cluster_id` (String) Field usage depends on `service` value: + - Service `panoply`: Cluster ID. + - Service `periscope_warehouse`: Cluster ID. + - Service `redshift`: Cluster ID. Must be populated if `connection_type` is set to `SshTunnel` and `auth_type` is set to `IAM`. +- `cluster_region` (String) Field usage depends on `service` value: + - Service `panoply`: Cluster region. + - Service `periscope_warehouse`: Cluster region. + - Service `redshift`: Cluster region. Must be populated if `connection_type` is set to `SshTunnel` and `auth_type` is set to `IAM`. +- `connection_method` (String) +- `connection_type` (String) Field usage depends on `service` value: + - Service `adls`: Connection method. Default value: `Directly`. + - Service `aurora_postgres_warehouse`: Connection method. Default value: `Directly`. + - Service `aurora_warehouse`: Connection method. Default value: `Directly`. + - Service `azure_postgres_warehouse`: Connection method. Default value: `Directly`. + - Service `azure_sql_data_warehouse`: Connection method. Default value: `Directly`. + - Service `azure_sql_database`: Connection method. Default value: `Directly`. + - Service `azure_sql_managed_db_warehouse`: Connection method. Default value: `Directly`. + - Service `databricks`: Connection method. Default value: `Directly`. + - Service `maria_rds_warehouse`: Connection method. Default value: `Directly`. + - Service `maria_warehouse`: Connection method. Default value: `Directly`. + - Service `mysql_rds_warehouse`: Connection method. Default value: `Directly`. + - Service `mysql_warehouse`: Connection method. Default value: `Directly`. + - Service `panoply`: Connection method. Default value: `Directly`. + - Service `periscope_warehouse`: Connection method. Default value: `Directly`. + - Service `postgres_gcp_warehouse`: Connection method. 
Default value: `Directly`.
+ - Service `postgres_rds_warehouse`: Connection method. Default value: `Directly`.
+ - Service `postgres_warehouse`: Connection method. Default value: `Directly`.
+ - Service `redshift`: Connection method. Default value: `Directly`.
+ - Service `snowflake`: Connection method. Default value: `Directly`.
+ - Service `sql_server_rds_warehouse`: Connection method. Default value: `Directly`.
+ - Service `sql_server_warehouse`: Connection method. Default value: `Directly`.
+- `container_name` (String) Field usage depends on `service` value:
+ - Service `adls`: Container to store delta table files
+ - Service `onelake`: Workspace name to store delta table files
+- `controller_id` (String)
+- `create_external_tables` (Boolean) Field usage depends on `service` value:
+ - Service `databricks`: Whether to create external tables
+- `data_format` (String) Field usage depends on `service` value:
+ - Service `confluent_cloud_wh`: Confluent Cloud message format.
+- `data_set_location` (String) Field usage depends on `service` value:
+ - Service `big_query`: Data location. Datasets will reside in this location.
+ - Service `big_query_dts`: Data location. Datasets will reside in this location.
+ - Service `managed_big_query`: Data location. Datasets will reside in this location.
+- `database` (String) Field usage depends on `service` value:
+ - Service `aurora_postgres_warehouse`: Database name
+ - Service `aurora_warehouse`: Database name
+ - Service `azure_postgres_warehouse`: Database name
+ - Service `azure_sql_data_warehouse`: Database name
+ - Service `azure_sql_database`: Database name
+ - Service `azure_sql_managed_db_warehouse`: Database name
+ - Service `maria_rds_warehouse`: Database name
+ - Service `maria_warehouse`: Database name
+ - Service `mysql_rds_warehouse`: Database name
+ - Service `mysql_warehouse`: Database name
+ - Service `panoply`: Database name
+ - Service `periscope_warehouse`: Database name
+ - Service `postgres_gcp_warehouse`: Database name
+ - Service `postgres_rds_warehouse`: Database name
+ - Service `postgres_warehouse`: Database name
+ - Service `redshift`: Database name
+ - Service `snowflake`: Database name
+ - Service `sql_server_rds_warehouse`: Database name
+ - Service `sql_server_warehouse`: Database name
+- `enable_remote_execution` (Boolean)
+- `external_location` (String) Field usage depends on `service` value:
+ - Service `databricks`: External location to store Delta tables. Default value: `""` (null). By default, the external tables will reside in the `/{schema}/{table}` path; if you specify an external location, they will reside in the `{externalLocation}/{schema}/{table}` path.
+- `fivetran_glue_role_arn` (String)
+- `fivetran_msk_role_arn` (String)
+- `fivetran_role_arn` (String) Field usage depends on `service` value:
+ - Service `new_s3_datalake`: ARN of the role which you created with the required policy mentioned in our setup guide
+- `host` (String) Field usage depends on `service` value:
+ - Service `aurora_postgres_warehouse`: Server name
+ - Service `aurora_warehouse`: Server name
+ - Service `azure_postgres_warehouse`: Server name
+ - Service `azure_sql_data_warehouse`: Server name
+ - Service `azure_sql_database`: Server name
+ - Service `azure_sql_managed_db_warehouse`: Server name
+ - Service `maria_rds_warehouse`: Server name
+ - Service `maria_warehouse`: Server name
+ - Service `mysql_rds_warehouse`: Server name
+ - Service `mysql_warehouse`: Server name
+ - Service `panoply`: Server name
+ - Service `periscope_warehouse`: Server name
+ - Service `postgres_gcp_warehouse`: Server name
+ - Service `postgres_rds_warehouse`: Server name
+ - Service `postgres_warehouse`: Server name
+ - Service `redshift`: Server name
+ - Service `snowflake`: Server name
+ - Service `sql_server_rds_warehouse`: Server name
+ - Service `sql_server_warehouse`: Server name
+- `http_path` (String) Field usage depends on `service` value:
+ - Service `databricks`: HTTP path
+- `is_private_key_encrypted` (Boolean) Field usage depends on `service` value:
+ - Service `snowflake`: Indicates that a private key is encrypted. The default value: `false`. The field can be specified if authentication type is `KEY_PAIR`.
+- `is_private_link_required` (Boolean) Field usage depends on `service` value:
+ - Service `new_s3_datalake`: We use PrivateLink by default if your s3 bucket is in the same region as Fivetran. Turning on this toggle ensures that Fivetran always connects to s3 bucket over PrivateLink. Learn more in our [PrivateLink documentation](https://fivetran.com/docs/connectors/databases/connection-options#awsprivatelinkbeta).
+- `is_redshift_serverless` (Boolean) Field usage depends on `service` value:
+ - Service `redshift`: Is your destination Redshift Serverless
+- `lakehouse_name` (String) Field usage depends on `service` value:
+ - Service `onelake`: Name of your lakehouse
+- `msk_sts_region` (String)
+- `num_of_partitions` (Number) Field usage depends on `service` value:
+ - Service `confluent_cloud_wh`: Number of partitions per topic.
+- `oauth2_client_id` (String) Field usage depends on `service` value:
+ - Service `databricks`: OAuth 2.0 client ID
+- `oauth2_secret` (String, Sensitive) Field usage depends on `service` value:
+ - Service `databricks`: OAuth 2.0 secret
+- `passphrase` (String, Sensitive) Field usage depends on `service` value:
+ - Service `snowflake`: If the private key is encrypted, you are required to enter the passphrase that was used to encrypt the private key. The field can be specified if authentication type is `KEY_PAIR`.
+- `password` (String, Sensitive) Field usage depends on `service` value:
+ - Service `aurora_postgres_warehouse`: Database user password
+ - Service `aurora_warehouse`: Database user password
+ - Service `azure_postgres_warehouse`: Database user password
+ - Service `azure_sql_data_warehouse`: Database user password
+ - Service `azure_sql_database`: Database user password
+ - Service `azure_sql_managed_db_warehouse`: Database user password
+ - Service `maria_rds_warehouse`: Database user password
+ - Service `maria_warehouse`: Database user password
+ - Service `mysql_rds_warehouse`: Database user password
+ - Service `mysql_warehouse`: Database user password
+ - Service `panoply`: Database user password
+ - Service `periscope_warehouse`: Database user password
+ - Service `postgres_gcp_warehouse`: Database user password
+ - Service `postgres_rds_warehouse`: Database user password
+ - Service `postgres_warehouse`: Database user password
+ - Service `redshift`: Database user password. Required if authentication type is `PASSWORD`.
+ - Service `snowflake`: Database user password. The field should be specified if authentication type is `PASSWORD`.
+ - Service `sql_server_rds_warehouse`: Database user password
+ - Service `sql_server_warehouse`: Database user password
+- `personal_access_token` (String, Sensitive) Field usage depends on `service` value:
+ - Service `databricks`: Personal access token
+- `port` (Number) Field usage depends on `service` value:
+ - Service `aurora_postgres_warehouse`: Server port number
+ - Service `aurora_warehouse`: Server port number
+ - Service `azure_postgres_warehouse`: Server port number
+ - Service `azure_sql_data_warehouse`: Server port number
+ - Service `azure_sql_database`: Server port number
+ - Service `azure_sql_managed_db_warehouse`: Server port number
+ - Service `databricks`: Server port number
+ - Service `maria_rds_warehouse`: Server port number
+ - Service `maria_warehouse`: Server port number
+ - Service `mysql_rds_warehouse`: Server port number
+ - Service `mysql_warehouse`: Server port number
+ - Service `panoply`: Server port number
+ - Service `periscope_warehouse`: Server port number
+ - Service `postgres_gcp_warehouse`: Server port number
+ - Service `postgres_rds_warehouse`: Server port number
+ - Service `postgres_warehouse`: Server port number
+ - Service `redshift`: Server port number
+ - Service `snowflake`: Server port number
+ - Service `sql_server_rds_warehouse`: Server port number
+ - Service `sql_server_warehouse`: Server port number
+- `prefix_path` (String) Field usage depends on `service` value:
+ - Service `adls`: path/to/data within the container
+ - Service `new_s3_datalake`: Prefix path of the bucket for which you have configured access policy. It is not required if access has been granted to the entire bucket in the access policy
+ - Service `onelake`: path/to/data within your lakehouse inside the Files directory
+- `private_key` (String, Sensitive) Field usage depends on `service` value:
+ - Service `snowflake`: Private access key. The field should be specified if authentication type is `KEY_PAIR`.
+- `project_id` (String) Field usage depends on `service` value:
+ - Service `big_query`: BigQuery project ID
+- `region` (String) Field usage depends on `service` value:
+ - Service `new_s3_datalake`: Region of your AWS S3 bucket
+- `registry_name` (String)
+- `registry_sts_region` (String)
+- `replication_factor` (Number) Field usage depends on `service` value:
+ - Service `confluent_cloud_wh`: Replication factor.
+- `resource_id` (String) Field usage depends on `service` value: + - Service `aurora_postgres_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `aurora_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `azure_postgres_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `azure_sql_data_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `azure_sql_database`: Field to test Self serve Private Link + - Service `azure_sql_managed_db_warehouse`: Field to test Self serve Private Link + - Service `databricks`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `maria_rds_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `maria_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `mysql_rds_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `mysql_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `panoply`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `periscope_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `postgres_gcp_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `postgres_rds_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `postgres_warehouse`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `redshift`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `snowflake`: This field is currently being introduced to test the Self-serve Private Link functionality + - Service `sql_server_rds_warehouse`: Field to test Self serve Private Link + - Service `sql_server_warehouse`: Field to test Self serve Private Link +- `role` (String) Field usage depends on `service` value: + - Service `snowflake`: If not specified, Fivetran will use the user's default role +- `role_arn` (String, Sensitive) Field usage depends on `service` value: + - Service `redshift`: Role ARN with Redshift permissions. Required if authentication type is `IAM`. +- `sasl_mechanism` (String) Field usage depends on `service` value: + - Service `confluent_cloud_wh`: Security protocol for Confluent Cloud interaction. +- `sasl_plain_key` (String, Sensitive) Field usage depends on `service` value: + - Service `confluent_cloud_wh`: Confluent Cloud SASL key. +- `sasl_plain_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `confluent_cloud_wh`: Confluent Cloud SASL secret. +- `schema_compatibility` (String) +- `schema_registry` (String) Field usage depends on `service` value: + - Service `confluent_cloud_wh`: Schema Registry +- `schema_registry_api_key` (String, Sensitive) Field usage depends on `service` value: + - Service `confluent_cloud_wh`: Schema registry API key. 
+- `schema_registry_api_secret` (String, Sensitive) Field usage depends on `service` value: + - Service `confluent_cloud_wh`: Schema registry API secret. +- `schema_registry_url` (String) Field usage depends on `service` value: + - Service `confluent_cloud_wh`: Schema registry URL. +- `secret_key` (String, Sensitive) Field usage depends on `service` value: + - Service `big_query`: Private key of the customer service account. If specified, your service account will be used to process the data instead of the Fivetran-managed service account. + - Service `big_query_dts`: Private key of the customer service account. If specified, your service account will be used to process the data instead of the Fivetran-managed service account. + - Service `managed_big_query`: Private key of the customer service account. If specified, your service account will be used to process the data instead of the Fivetran-managed service account. +- `secret_value` (String, Sensitive) Field usage depends on `service` value: + - Service `adls`: Secret value for service principal + - Service `onelake`: Secret value for service principal +- `security_protocol` (String) Field usage depends on `service` value: + - Service `confluent_cloud_wh`: Security protocol for Confluent Cloud interaction. +- `server_host_name` (String) Field usage depends on `service` value: + - Service `databricks`: Server name +- `snapshot_retention_period` (String) Field usage depends on `service` value: + - Service `adls`: Snapshots older than the retention period are deleted every week. Default value: `ONE_WEEK`. + - Service `new_s3_datalake`: Snapshots older than the retention period are deleted every week. Default value: `ONE_WEEK`. + - Service `onelake`: Snapshots older than the retention period are deleted every week. Default value: `ONE_WEEK`. +- `snowflake_cloud` (String) +- `snowflake_region` (String) +- `storage_account_name` (String) Field usage depends on `service` value: + - Service `adls`: Storage account for Azure Data Lake Storage Gen2 name + - Service `onelake`: Storage account for Azure Data Lake Storage Gen2 name +- `table_format` (String) Field usage depends on `service` value: + - Service `new_s3_datalake`: (Immutable) The table format in which you want to sync your tables. Valid values are ICEBERG and DELTA_LAKE +- `tenant_id` (String) Field usage depends on `service` value: + - Service `adls`: Tenant id of service principal + - Service `onelake`: Tenant ID of service principal +- `tunnel_host` (String) Field usage depends on `service` value: + - Service `aurora_postgres_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `aurora_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_postgres_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_sql_data_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_sql_database`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_sql_managed_db_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `maria_rds_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `maria_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `mysql_rds_warehouse`: SSH server name. 
Must be populated if `connection_type` is set to `SshTunnel`. + - Service `mysql_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `panoply`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `periscope_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `postgres_gcp_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `postgres_rds_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `postgres_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `redshift`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `sql_server_rds_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `sql_server_warehouse`: SSH server name. Must be populated if `connection_type` is set to `SshTunnel`. +- `tunnel_port` (Number) Field usage depends on `service` value: + - Service `aurora_postgres_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `aurora_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_postgres_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_sql_data_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_sql_database`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_sql_managed_db_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `maria_rds_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `maria_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `mysql_rds_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `mysql_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `panoply`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `periscope_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `postgres_gcp_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `postgres_rds_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `postgres_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `redshift`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `sql_server_rds_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `sql_server_warehouse`: SSH server port name. Must be populated if `connection_type` is set to `SshTunnel`. +- `tunnel_user` (String) Field usage depends on `service` value: + - Service `aurora_postgres_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `aurora_warehouse`: SSH user name. 
Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_postgres_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_sql_data_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_sql_database`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `azure_sql_managed_db_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `maria_rds_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `maria_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `mysql_rds_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `mysql_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `panoply`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `periscope_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `postgres_gcp_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `postgres_rds_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `postgres_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `redshift`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `sql_server_rds_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. + - Service `sql_server_warehouse`: SSH user name. Must be populated if `connection_type` is set to `SshTunnel`. 
+- `user` (String) Field usage depends on `service` value:
+ - Service `aurora_postgres_warehouse`: Database user name
+ - Service `aurora_warehouse`: Database user name
+ - Service `azure_postgres_warehouse`: Database user name
+ - Service `azure_sql_data_warehouse`: Database user name
+ - Service `azure_sql_database`: Database user name
+ - Service `azure_sql_managed_db_warehouse`: Database user name
+ - Service `maria_rds_warehouse`: Database user name
+ - Service `maria_warehouse`: Database user name
+ - Service `mysql_rds_warehouse`: Database user name
+ - Service `mysql_warehouse`: Database user name
+ - Service `panoply`: Database user name
+ - Service `periscope_warehouse`: Database user name
+ - Service `postgres_gcp_warehouse`: Database user name
+ - Service `postgres_rds_warehouse`: Database user name
+ - Service `postgres_warehouse`: Database user name
+ - Service `redshift`: Database user name
+ - Service `snowflake`: Database user name
+ - Service `sql_server_rds_warehouse`: Database user name
+ - Service `sql_server_warehouse`: Database user name
+- `workspace_name` (String) Field usage depends on `service` value:
+ - Service `onelake`: OneLake workspace name
+
+Read-Only:
+
+- `external_id` (String) Field usage depends on `service` value:
+ - Service `aws_msk_wh`: Fivetran generated External ID
+ - Service `panoply`: Fivetran generated External ID
+ - Service `periscope_warehouse`: Fivetran generated External ID
+ - Service `redshift`: Fivetran generated External ID
+- `public_key` (String) Field usage depends on `service` value:
+ - Service `aurora_postgres_warehouse`: Public Key
+ - Service `aurora_warehouse`: Public Key
+ - Service `azure_postgres_warehouse`: Public Key
+ - Service `azure_sql_data_warehouse`: Public Key
+ - Service `azure_sql_database`: Public Key
+ - Service `azure_sql_managed_db_warehouse`: Public Key
+ - Service `maria_rds_warehouse`: Public Key
+ - Service `maria_warehouse`: Public Key
+ - Service `mysql_rds_warehouse`: Public Key
+ - Service `mysql_warehouse`: Public Key
+ - Service `panoply`: Public Key
+ - Service `periscope_warehouse`: Public Key
+ - Service `postgres_gcp_warehouse`: Public Key
+ - Service `postgres_rds_warehouse`: Public Key
+ - Service `postgres_warehouse`: Public Key
+ - Service `redshift`: Public Key
+ - Service `sql_server_rds_warehouse`: Public Key
+ - Service `sql_server_warehouse`: Public Key
+
+
+
+### Nested Schema for `timeouts`
+
+Optional:
+
+- `create` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
+- `update` (String) A string that can be [parsed as a duration](https://pkg.go.dev/time#ParseDuration) consisting of numbers and unit suffixes, such as "30s" or "2h45m". Valid time units are "s" (seconds), "m" (minutes), "h" (hours).
+
+## Setup tests
+
+The `run_setup_tests` field doesn't have an upstream value; it only defines local resource behavior. This means that when you update only the `run_setup_tests` value (from `false` to `true`, for example), it won't cause any upstream actions. The value will just be saved in the Terraform state and then used on effective field updates.
+
+The default value is `false` - this means that no setup tests will be performed during create/update. To perform setup tests, you should set the value to `true`.
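+
+As an illustration, the following sketch reuses the example configuration from above with setup tests enabled; flipping only `run_setup_tests` from `false` to `true` on an existing resource is saved to the state without any upstream call:
+
+```hcl
+resource "fivetran_destination" "dest" {
+    group_id         = fivetran_group.group.id
+    service          = "postgres_rds_warehouse"
+    time_zone_offset = "0"
+    region           = "GCP_US_EAST4"
+    # stored in state only; tests run together with the next effective field update
+    run_setup_tests  = "true"
+
+    config {
+        host            = "destination.fqdn"
+        port            = 5432
+        user            = "postgres"
+        password        = "myPass"
+        database        = "fivetran"
+        connection_type = "Directly"
+    }
+}
+```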
+## Import
+
+1. To import an existing `fivetran_destination` resource into your Terraform state, you need to get the **Destination Group ID** from the destination page in your Fivetran dashboard.
+To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups).
+2. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_destination" "my_imported_destination" {
+
+}
+```
+
+3. Run the `terraform import` command with the following parameters:
+
+```
+terraform import fivetran_destination.my_imported_destination {your Destination Group ID}
+```
+
+4. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_destination.my_imported_destination'
+```
+5. Copy the values and paste them to your `.tf` configuration.
+
+-> The `config` object in the state contains all properties defined in the schema. You need to remove the properties from the `config` that are not related to your destination type. See the [Fivetran REST API documentation](https://fivetran.com/docs/rest-api/destinations/config) for reference to find the properties you need to keep in the `config` section.
\ No newline at end of file
diff --git a/docs/resources/destination_certificates.md b/docs/resources/destination_certificates.md
new file mode 100644
index 00000000..566d343e
--- /dev/null
+++ b/docs/resources/destination_certificates.md
@@ -0,0 +1,46 @@
+---
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "fivetran_destination_certificates Resource - terraform-provider-fivetran"
+subcategory: ""
+description: |-
+
+---
+
+# fivetran_destination_certificates (Resource)
+
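+## Example Usage
+
+A minimal usage sketch, assuming an existing `fivetran_destination.my_destination`; the certificate values are placeholders (`encoded_cert` is the Base64-encoded certificate and `hash` its hash, per the schema below):
+
+```hcl
+resource "fivetran_destination_certificates" "my_destination_approved_certificates" {
+    destination_id = fivetran_destination.my_destination.id
+
+    certificate {
+        hash         = "jhgfJfgrI6yy..."
+        encoded_cert = "... base64 encoded certificate ..."
+    }
+}
+```
+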
+## Schema
+
+### Required
+
+- `destination_id` (String) The unique identifier for the target destination within the Fivetran system.
+
+### Optional
+
+- `certificate` (Block Set) (see [below for nested schema](#nestedblock--certificate))
+
+### Read-Only
+
+- `id` (String) The unique identifier for the resource. Equal to target destination id.
+
+
+### Nested Schema for `certificate`
+
+Required:
+
+- `encoded_cert` (String, Sensitive) Base64 encoded certificate.
+- `hash` (String) Hash of the certificate.
+
+Read-Only:
+
+- `name` (String) Certificate name.
+- `public_key` (String) The SSH public key.
+- `sha1` (String) Certificate sha1.
+- `sha256` (String) Certificate sha256.
+- `type` (String) Type of the certificate.
+- `validated_by` (String) User name who validated the certificate.
+- `validated_date` (String) The date when certificate was approved.
diff --git a/docs/resources/destination_fingerprints.md b/docs/resources/destination_fingerprints.md
new file mode 100644
index 00000000..cab7602e
--- /dev/null
+++ b/docs/resources/destination_fingerprints.md
@@ -0,0 +1,79 @@
+---
+page_title: "Resource: fivetran_destination_fingerprints"
+---
+
+# Resource: fivetran_destination_fingerprints
+
+This resource allows you to manage the list of approved SSH fingerprints for a particular destination.
+
+## Example Usage
+
+```hcl
+resource "fivetran_destination_fingerprints" "my_destination_approved_fingerprints" {
+    destination_id = fivetran_destination.my_destination.id
+    fingerprint {
+        hash       = "jhgfJfgrI6yy..."
+        public_key = "ssh-rsa CCCCB3NzaC1yc2ECCASFWFWDFRWT5WAS ... fivetran user key"
+    }
+    fingerprint {
+        hash       = "eUtPirI6yytWe..."
+        public_key = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC6 ... fivetran user key"
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `destination_id` (String) The unique identifier for the target destination within the Fivetran system.
+
+### Optional
+
+- `fingerprint` (Block Set) (see [below for nested schema](#nestedblock--fingerprint))
+
+### Read-Only
+
+- `id` (String) The unique identifier for the resource. Equal to target destination id.
+
+
+### Nested Schema for `fingerprint`
+
+Required:
+
+- `hash` (String) Hash of the fingerprint.
+- `public_key` (String) The SSH public key.
+
+Read-Only:
+
+- `validated_by` (String) User name who validated the fingerprint.
+- `validated_date` (String) The date when fingerprint was approved.
+
+## Import
+
+1. To import an existing `fivetran_destination_fingerprints` resource into your Terraform state, you need to get the **Destination Group ID** from the destination page in your Fivetran dashboard.
+
+2. To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups).
+
+3. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_destination_fingerprints" "my_imported_destination_fingerprints" {
+
+}
+```
+
+4. Run the `terraform import` command:
+
+```
+terraform import fivetran_destination_fingerprints.my_imported_destination_fingerprints {your Destination Group ID}
+```
+
+5. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_destination_fingerprints.my_imported_destination_fingerprints'
+```
+
+6. Copy the values and paste them to your `.tf` configuration.
\ No newline at end of file
diff --git a/docs/resources/external_logging.md b/docs/resources/external_logging.md
new file mode 100644
index 00000000..8aa0fefa
--- /dev/null
+++ b/docs/resources/external_logging.md
@@ -0,0 +1,95 @@
+---
+page_title: "Resource: fivetran_external_logging"
+---
+
+# Resource: fivetran_external_logging
+
+This resource allows you to create, update, and delete logging services.
+
+## Example Usage
+
+```hcl
+resource "fivetran_external_logging" "extlog" {
+    group_id = fivetran_group.group.id
+    service = "azure_monitor_log"
+    enabled = true
+    run_setup_tests = true
+
+    config {
+        workspace_id = "workspace_id"
+        primary_key = "PASSWORD"
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `group_id` (String) The unique identifier for the log service within the Fivetran system.
+- `service` (String) The name for the log service type within the Fivetran system. We support the following log services: azure_monitor_log, cloudwatch, datadog_log, new_relic_log, splunkLog, stackdriver.
+
+### Optional
+
+- `config` (Block, Optional) (see [below for nested schema](#nestedblock--config))
+- `enabled` (Boolean) The boolean value specifying whether the log service is enabled.
+- `run_setup_tests` (Boolean) Specifies whether the setup tests should be run automatically. The default value is TRUE.
+
+### Read-Only
+
+- `id` (String) The unique identifier for the log service within the Fivetran system.
+
+
+### Nested Schema for `config`
+
+Optional:
+
+- `api_key` (String, Sensitive) API Key
+- `channel` (String) Channel
+- `enable_ssl` (Boolean) Enable SSL
+- `external_id` (String) External ID
+- `host` (String) Server name
+- `hostname` (String) Server name
+- `log_group_name` (String) Log Group Name
+- `port` (Number) Port
+- `primary_key` (String, Sensitive) Primary Key
+- `project_id` (String) Project Id for Google Cloud Logging
+- `region` (String) Region
+- `role_arn` (String) Role Arn
+- `sub_domain` (String) Sub Domain
+- `token` (String, Sensitive) Token
+- `workspace_id` (String) Workspace ID
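+
+For instance, a minimal sketch for a Google Cloud Logging (`stackdriver`) service would presumably use only `project_id` from the list above; the exact set of fields required for each service type is listed in the [Fivetran REST API documentation](https://fivetran.com/docs/rest-api/log-service-management#logservicesetupconfigurations):
+
+```hcl
+resource "fivetran_external_logging" "stackdriver_log" {
+    group_id = fivetran_group.group.id
+    service  = "stackdriver"
+    enabled  = true
+
+    config {
+        project_id = "my-gcp-project-id"
+    }
+}
+```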
+
+## Setup tests
+
+The `run_setup_tests` field doesn't have an upstream value; it only defines local resource behavior. This means that updating only the `run_setup_tests` value (from `false` to `true`, for example) won't cause any upstream actions. The value is just saved in the Terraform state and then used on effective field updates.
+
+The default value is `false`, meaning that no setup tests are performed during create/update. To perform setup tests, set the value to `true`.
+
+## Import
+
+1. To import an existing `fivetran_external_logging` resource into your Terraform state, you need to get the **External Logging Group ID** from the external logging page in your Fivetran dashboard.
+To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups).
+2. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_external_logging" "my_imported_external_logging" {
+
+}
+```
+
+3. Run the `terraform import` command with the following parameters:
+
+```
+terraform import fivetran_external_logging.my_imported_external_logging {your External Logging Group ID}
+```
+
+4. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_external_logging.my_imported_external_logging'
+```
+5. Copy the values and paste them to your `.tf` configuration.
+
+-> The `config` object in the state contains all properties defined in the schema. You need to remove the properties from the `config` that are not related to your log service type. See the [Fivetran REST API documentation](https://fivetran.com/docs/rest-api/log-service-management#logservicesetupconfigurations) for reference to find the properties you need to keep in the `config` section.
\ No newline at end of file
diff --git a/docs/resources/group.md b/docs/resources/group.md
new file mode 100644
index 00000000..f60c4230
--- /dev/null
+++ b/docs/resources/group.md
@@ -0,0 +1,58 @@
+---
+page_title: "Resource: fivetran_group"
+---
+
+# Resource: fivetran_group
+
+This resource allows you to create, update, and delete groups.
+
+IMPORTANT: Groups and destinations are mapped 1:1 to each other. We do this mapping using the group's id value that we automatically generate when you create a group using our Terraform Provider, and the destination's group_id value that you specify when you create a destination using our Terraform Provider. This means that if you use our Terraform Provider to create a destination, you must create a group in your Fivetran account before you can create a destination in it.
+
+When you create a destination in your Fivetran dashboard, we automatically create a group, assign a value to its id, and create a destination with the same group_id value, which is unique in your Fivetran account. The group's name corresponds to the destination name you specify in your Fivetran dashboard when creating the destination.
+
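+This ordering requirement looks like the following in practice (a minimal sketch; the destination `service`, `region`, and `config` values are placeholders):
+
+```hcl
+resource "fivetran_group" "group" {
+    name = "MyGroup"
+}
+
+resource "fivetran_destination" "destination" {
+    group_id         = fivetran_group.group.id
+    service          = "postgres_warehouse"
+    region           = "GCP_US_EAST4"
+    time_zone_offset = "0"
+
+    config {
+        # destination-specific connection settings
+    }
+}
+```
+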
+## Example Usage
+
+```hcl
+resource "fivetran_group" "group" {
+    name = "MyGroup"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `name` (String) The name of the group within your account.
+
+### Read-Only
+
+- `created_at` (String) The timestamp of when the group was created in your account.
+- `id` (String) The unique identifier for the group within the Fivetran system.
+- `last_updated` (String) The timestamp of when the resource/datasource was updated last time.
+
+## Import
+
+1. To import an existing `fivetran_group` resource into your Terraform state, you need to get the **Destination Group ID** from the destination page in your Fivetran dashboard.
+To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups).
+2. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_group" "my_imported_fivetran_group" {
+
+}
+```
+
+3. Run the `terraform import` command:
+
+```
+terraform import fivetran_group.my_imported_fivetran_group {your Destination Group ID}
+```
+
+4. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_group.my_imported_fivetran_group'
+```
+
+5. Copy the values and paste them to your `.tf` configuration.
\ No newline at end of file
diff --git a/docs/resources/group_users.md b/docs/resources/group_users.md
new file mode 100644
index 00000000..3cd0ab79
--- /dev/null
+++ b/docs/resources/group_users.md
@@ -0,0 +1,78 @@
+---
+page_title: "Resource: fivetran_group_users"
+---
+
+# Resource: fivetran_group_users
+
+This resource allows you to create, update, and delete user memberships in groups.
+
+## Example Usage
+
+```hcl
+resource "fivetran_group_users" "group_users" {
+    group_id = fivetran_group.group.id
+
+    user {
+        email = "mail@example.com"
+        role = "Destination Analyst"
+    }
+
+    user {
+        email = "another_mail@example.com"
+        role = "Destination Analyst"
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `group_id` (String) The unique identifier for the Group within the Fivetran system.
+
+### Optional
+
+- `user` (Block Set) (see [below for nested schema](#nestedblock--user))
+
+### Read-Only
+
+- `id` (String) The unique identifier for the resource.
+- `last_updated` (String) The timestamp of when the resource/datasource was updated last time.
+
+
+### Nested Schema for `user`
+
+Required:
+
+- `email` (String) The email address that the user has associated with their user profile.
+- `role` (String) The group role that you would like to assign this new user to. Supported group roles: `Destination Administrator`, `Destination Reviewer`, `Destination Analyst`, `Connector Creator`, or a custom destination role.
+
+Read-Only:
+
+- `id` (String) The unique identifier for the user within the account.
+
+## Import
+
+1. To import an existing `fivetran_group_users` resource into your Terraform state, you need to get the **Destination Group ID** from the destination page in your Fivetran dashboard.
+To retrieve existing groups, use the [fivetran_groups data source](/docs/data-sources/groups).
+2. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_group_users" "my_imported_fivetran_group_users" {
+
+}
+```
+
+3. Run the `terraform import` command:
+
+```
+terraform import fivetran_group_users.my_imported_fivetran_group_users {your Destination Group ID}
+```
+
+4. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_group_users.my_imported_fivetran_group_users'
+```
+5. Copy the values and paste them to your `.tf` configuration.
\ No newline at end of file
diff --git a/docs/resources/hybrid_deployment_agent.md b/docs/resources/hybrid_deployment_agent.md
new file mode 100644
index 00000000..2b4ea831
--- /dev/null
+++ b/docs/resources/hybrid_deployment_agent.md
@@ -0,0 +1,41 @@
+---
+page_title: "Resource: fivetran_hybrid_deployment_agent"
+---
+
+# Resource: fivetran_hybrid_deployment_agent
+
+This resource allows you to create, update, and delete hybrid deployment agents.
+
+## Example Usage
+
+```hcl
+resource "fivetran_hybrid_deployment_agent" "hybrid_deployment_agent" {
+    provider = fivetran-provider
+
+    display_name = "display_name"
+    group_id = "group_id"
+    auth_type = "AUTO"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `auth_type` (String) Type of authentication. Possible values: `AUTO`, `MANUAL`.
+- `display_name` (String) The unique name for the hybrid deployment agent.
+- `group_id` (String) The unique identifier for the Group within the Fivetran system.
+
+### Optional
+
+- `authentication_counter` (Number) Determines whether re-authentication needs to be performed.
+
+### Read-Only
+
+- `auth_json` (String) Base64-encoded content of the auth.json file.
+- `config_json` (String) Base64-encoded content of the config.json file.
+- `docker_compose_yaml` (String) Base64-encoded content of the compose file for the chosen containerization type.
+- `id` (String) The unique identifier for the hybrid deployment agent within your account.
+- `registered_at` (String) The timestamp of when the hybrid deployment agent was created in your account.
+- `token` (String) Base64-encoded content of the token.
\ No newline at end of file
diff --git a/docs/resources/local_processing_agent.md b/docs/resources/local_processing_agent.md
new file mode 100644
index 00000000..9205511b
--- /dev/null
+++ b/docs/resources/local_processing_agent.md
@@ -0,0 +1,54 @@
+---
+page_title: "Resource: fivetran_local_processing_agent"
+---
+
+# Resource: fivetran_local_processing_agent
+
+NOTE: In connection with the general availability of the hybrid deployment functionality and in order to synchronize internal terminology, we have deprecated this resource.
+
+This resource allows you to create, update, and delete local processing agents.
+
+
+## Example Usage
+
+```hcl
+resource "fivetran_local_processing_agent" "test_lpa" {
+    provider = fivetran-provider
+
+    display_name = "display_name"
+    group_id = "group_id"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `display_name` (String) The unique name for the local processing agent.
+- `group_id` (String) The unique identifier for the Group within the Fivetran system.
+
+### Optional
+
+- `authentication_counter` (Number) Determines whether re-authentication needs to be performed.
+
+### Read-Only
+
+- `auth_json` (String) Base64-encoded content of the auth.json file.
+- `config_json` (String) Base64-encoded content of the config.json file.
+- `docker_compose_yaml` (String) Base64-encoded content of the compose file for the chosen containerization type.
+- `id` (String) The unique identifier for the local processing agent within your account.
+- `registered_at` (String) The timestamp of when the local processing agent was created in your account.
+- `usage` (Attributes Set) (see [below for nested schema](#nestedatt--usage))
+
+
+### Nested Schema for `usage`
+
+Required:
+
+- `schema` (String) The connection schema name.
+- `service` (String) The connection type.
+
+Read-Only:
+
+- `connection_id` (String) The unique identifier of the connection associated with the agent.
\ No newline at end of file
diff --git a/docs/resources/private_link.md b/docs/resources/private_link.md
new file mode 100644
index 00000000..8e11cca7
--- /dev/null
+++ b/docs/resources/private_link.md
@@ -0,0 +1,55 @@
+---
+page_title: "Resource: fivetran_private_link"
+---
+
+# Resource: fivetran_private_link
+
+This resource allows you to create, update, and delete private links.
+
+## Example Usage
+
+```hcl
+resource "fivetran_private_link" "test_pl" {
+    provider = fivetran-provider
+
+    name = "name"
+    region = "region"
+    service = "service"
+
+    config_map = {
+        connection_service_name = "connection_service_name"
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `config_map` (Map of String) Configuration.
+
+#### Possible values
+-- `connection_service_name` (String): The name of your connection service.
+-- `account_url` (String): The URL of your account.
+-- `vpce_id` (String): The ID of your Virtual Private Cloud Endpoint.
+-- `aws_account_id` (String): The ID of your AWS account.
+-- `cluster_identifier` (String): The cluster identifier.
+-- `connection_service_id` (String): The ID of your connection service.
+-- `workspace_url` (String): The URL of your workspace.
+-- `pls_id` (String): The ID of your Azure Private Link service.
+-- `sub_resource_name` (String): The name of the subresource.
+-- `private_dns_regions` (String): Private DNS Regions.
+-- `private_connection_service_id` (String): The ID of your private connection service.
+- `name` (String) The private link name within the account. The name must start with a letter or underscore and can only contain letters, numbers, or underscores. Maximum size of name is 23 characters.
+- `region` (String) Data processing location. This is where Fivetran will operate and run computation on data.
+- `service` (String) Service type.
+
+### Read-Only
+
+- `cloud_provider` (String) The cloud provider name.
+- `created_at` (String) The date and time the membership was created.
+- `created_by` (String) The unique identifier for the User within the Fivetran system.
+- `id` (String) The unique identifier for the private link within the Fivetran system.
+- `state` (String) The state of the private link.
+- `state_summary` (String) The state summary of the private link.
diff --git a/docs/resources/proxy_agent.md b/docs/resources/proxy_agent.md
new file mode 100644
index 00000000..40e70b15
--- /dev/null
+++ b/docs/resources/proxy_agent.md
@@ -0,0 +1,35 @@
+---
+page_title: "Resource: fivetran_proxy_agent"
+---
+
+# Resource: fivetran_proxy_agent
+
+This resource allows you to create, update, and delete proxy agents.
+
+## Example Usage
+
+```hcl
+resource "fivetran_proxy_agent" "test_proxy_agent" {
+    provider = fivetran-provider
+
+    display_name = "display_name"
+    group_region = "group_region"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `display_name` (String) Proxy agent name.
+- `group_region` (String) Data processing location. This is where Fivetran will operate and run computation on data.
+
+### Read-Only
+
+- `created_by` (String) The actor who created the proxy agent.
+- `id` (String) The unique identifier for the proxy within your account.
+- `proxy_server_uri` (String) The proxy server URI.
+- `registred_at` (String) The timestamp of when the proxy agent was created in your account.
+- `salt` (String) The salt.
+- `token` (String) The auth token.
diff --git a/docs/resources/team.md b/docs/resources/team.md
new file mode 100644
index 00000000..61b5aefb
--- /dev/null
+++ b/docs/resources/team.md
@@ -0,0 +1,61 @@
+---
+page_title: "Resource: fivetran_team"
+---
+
+# Resource: fivetran_team
+
+This resource allows you to create, update, and delete teams.
+
+## Example Usage
+
+```hcl
+resource "fivetran_team" "test_team" {
+    provider = fivetran-provider
+
+    name = "test_team"
+    description = "test_description"
+    role = "Account Reviewer"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `name` (String) The name of the team within your account.
+- `role` (String) The account role of the team.
+
+### Optional
+
+- `description` (String) The description of the team within your account.
+
+### Read-Only
+
+- `id` (String) The unique identifier for the team within your account.
+
+## Import
+
+1. To import an existing `fivetran_team` resource into your Terraform state, you need to get `team_id`.
+You can retrieve all teams using the [fivetran_teams data source](/docs/data-sources/teams).
+
+2. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_team" "my_imported_fivetran_team" {
+
+}
+```
+
+3. Run the `terraform import` command:
+
+```
+terraform import fivetran_team.my_imported_fivetran_team {team_id}
+```
+
+4. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_team.my_imported_fivetran_team'
+```
+5. Copy the values and paste them to your `.tf` configuration.
\ No newline at end of file
diff --git a/docs/resources/team_connector_membership.md b/docs/resources/team_connector_membership.md
new file mode 100644
index 00000000..144e7c61
--- /dev/null
+++ b/docs/resources/team_connector_membership.md
@@ -0,0 +1,82 @@
+---
+page_title: "Resource: fivetran_team_connector_membership"
+---
+
+# Resource: fivetran_team_connector_membership
+
+This resource allows you to create, update, and delete connector memberships for teams.
+
+## Example Usage
+
+```hcl
+resource "fivetran_team_connector_membership" "test_team_connector_membership" {
+    provider = fivetran-provider
+
+    team_id = "test_team"
+
+    connector {
+        connector_id = "test_connector"
+        role = "Connector Administrator"
+    }
+
+    connector {
+        connector_id = "another_test_connector"
+        role = "Connector Administrator"
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `team_id` (String) The unique identifier for the team within your account.
+
+### Optional
+
+- `connector` (Block Set) (see [below for nested schema](#nestedblock--connector))
+
+### Read-Only
+
+- `id` (String) The unique identifier for resource.
+
+
+### Nested Schema for `connector`
+
+Required:
+
+- `connector_id` (String) The connector unique identifier
+- `role` (String) The team's role that links the team and the connector
+
+Read-Only:
+
+- `created_at` (String) The date and time the membership was created
+
+## Import
+
+1. To import an existing `fivetran_team_connector_membership` resource into your Terraform state, you need to get `team_id` and `connector_id`.
+You can retrieve all teams using the [fivetran_teams data source](/docs/data-sources/teams).
+
+2. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_team_connector_membership" "my_imported_fivetran_team_connector_membership" {
+
+}
+```
+
+3. Run the `terraform import` command:
+
+```
+terraform import fivetran_team_connector_membership.my_imported_fivetran_team_connector_membership {team_id}
+```
+
+4. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_team_connector_membership.my_imported_fivetran_team_connector_membership'
+```
+5. Copy the values and paste them to your `.tf` configuration.
diff --git a/docs/resources/team_group_membership.md b/docs/resources/team_group_membership.md
new file mode 100644
index 00000000..3fef8578
--- /dev/null
+++ b/docs/resources/team_group_membership.md
@@ -0,0 +1,82 @@
+---
+page_title: "Resource: fivetran_team_group_membership"
+---
+
+# Resource: fivetran_team_group_membership
+
+This resource allows you to create, update, and delete group memberships for teams.
+
+## Example Usage
+
+```hcl
+resource "fivetran_team_group_membership" "test_team_group_membership" {
+    provider = fivetran-provider
+
+    team_id = "test_team"
+
+    group {
+        group_id = "test_group"
+        role = "Destination Administrator"
+    }
+
+    group {
+        group_id = "another_test_group"
+        role = "Destination Administrator"
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `team_id` (String) The unique identifier for the team within your account.
+
+### Optional
+
+- `group` (Block Set) (see [below for nested schema](#nestedblock--group))
+
+### Read-Only
+
+- `id` (String) The unique identifier for resource.
+
+
+### Nested Schema for `group`
+
+Required:
+
+- `group_id` (String) The group unique identifier
+- `role` (String) The team's role that links the team and the group
+
+Read-Only:
+
+- `created_at` (String) The date and time the membership was created
+
+## Import
+
+1. To import an existing `fivetran_team_group_membership` resource into your Terraform state, you need to get `team_id` and `group_id`.
+You can retrieve all teams using the [fivetran_teams data source](/docs/data-sources/teams).
+
+2. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_team_group_membership" "my_imported_fivetran_team_group_membership" {
+
+}
+```
+
+3. Run the `terraform import` command:
+
+```
+terraform import fivetran_team_group_membership.my_imported_fivetran_team_group_membership {team_id}
+```
+
+4. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_team_group_membership.my_imported_fivetran_team_group_membership'
+```
+5. Copy the values and paste them to your `.tf` configuration.
diff --git a/docs/resources/team_user_membership.md b/docs/resources/team_user_membership.md
new file mode 100644
index 00000000..15a1b814
--- /dev/null
+++ b/docs/resources/team_user_membership.md
@@ -0,0 +1,76 @@
+---
+page_title: "Resource: fivetran_team_user_membership"
+---
+
+# Resource: fivetran_team_user_membership
+
+This resource allows you to create, update, and delete user memberships for teams.
+
+## Example Usage
+
+```hcl
+resource "fivetran_team_user_membership" "test_team_user_membership" {
+    provider = fivetran-provider
+
+    team_id = "test_team"
+
+    user {
+        user_id = "test_user"
+        role = "Connector Administrator"
+    }
+
+    user {
+        user_id = "another_test_user"
+        role = "Connector Administrator"
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `team_id` (String) The unique identifier for the team within your account.
+
+### Optional
+
+- `user` (Block Set) (see [below for nested schema](#nestedblock--user))
+
+### Read-Only
+
+- `id` (String) The unique identifier for resource.
+
+
+### Nested Schema for `user`
+
+Required:
+
+- `role` (String) The team's role that links the team and the user
+- `user_id` (String) The user unique identifier
+
+## Import
+
+1. To import an existing `fivetran_team_user_membership` resource into your Terraform state, you need to get `team_id` and `user_id`.
+You can retrieve all teams using the [fivetran_teams data source](/docs/data-sources/teams).
+
+2. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_team_user_membership" "my_imported_fivetran_team_user_membership" {
+
+}
+```
+
+3. Run the `terraform import` command:
+
+```
+terraform import fivetran_team_user_membership.my_imported_fivetran_team_user_membership {team_id}
+```
+
+4. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_team_user_membership.my_imported_fivetran_team_user_membership'
+```
+5. Copy the values and paste them to your `.tf` configuration.
diff --git a/docs/resources/transformation_project.md b/docs/resources/transformation_project.md
new file mode 100644
index 00000000..28bb4109
--- /dev/null
+++ b/docs/resources/transformation_project.md
@@ -0,0 +1,96 @@
+---
+page_title: "Resource: fivetran_transformation_project"
+---
+
+# Resource: fivetran_transformation_project
+
+Resource is in ALPHA state.
+
+This resource allows you to add, manage, and delete transformation projects in your account.
+
+## Example Usage
+
+```hcl
+resource "fivetran_transformation_project" "project" {
+    provider = fivetran-provider
+    group_id = "group_id"
+    type = "DBT_GIT"
+    run_tests = true
+
+    project_config {
+        git_remote_url = "git_remote_url"
+        git_branch = "git_branch"
+        folder_path = "folder_path"
+        dbt_version = "dbt_version"
+        default_schema = "default_schema"
+        threads = 1
+        target_name = "target_name"
+        environment_vars = ["environment_var"]
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `group_id` (String) The unique identifier for the group within the Fivetran system.
+- `type` (String) Transformation project type.
+
+### Optional
+
+- `project_config` (Block, Optional) (see [below for nested schema](#nestedblock--project_config))
+- `run_tests` (Boolean) Specifies whether the setup tests should be run automatically. The default value is TRUE.
+
+### Read-Only
+
+- `created_at` (String) The timestamp of the transformation project creation.
+- `created_by_id` (String) The unique identifier for the User within the Fivetran system who created the transformation project.
+- `errors` (Set of String) The list of transformation project setup errors.
+- `id` (String) The unique identifier for the transformation project within the Fivetran system.
+- `status` (String) Status of the transformation project (NOT_READY, READY, ERROR).
+
+
+### Nested Schema for `project_config`
+
+Optional:
+
+- `dbt_version` (String) The version of transformation that should run the project
+- `default_schema` (String) Default schema in destination. This production schema will contain your transformed data.
+- `environment_vars` (Set of String) List of environment variables defined as key-value pairs in the raw string format using = as a separator. The variable name should have the DBT_ prefix and can contain A-Z, 0-9, dash, underscore, or dot characters. Example: "DBT_VARIABLE=variable_value"
+- `folder_path` (String) Folder in Git repo with your transformation project
+- `git_branch` (String) Git branch
+- `git_remote_url` (String) Git remote URL with your transformation project
+- `target_name` (String) Target name to set or override the value from the deployment.yaml
+- `threads` (Number) The number of threads transformation will use (from 1 to 32). Make sure this value is compatible with your destination type. For example, Snowflake supports only 8 concurrent queries on an X-Small warehouse.
+
+Read-Only:
+
+- `public_key` (String) Public key to grant Fivetran SSH access to git repository.
+
+## Import
+
+1. To import an existing `fivetran_transformation_project` resource into your Terraform state, you need to get the **Transformation Project ID**. Make an API call `GET https://api.fivetran.com/v1/transformation-projects` to retrieve available projects.
+2. Fetch project details for a particular `project-id` using `GET https://api.fivetran.com/v1/transformation-projects/{project-id}` to ensure that this is the project you want to import.
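+
+For example, a sketch of step 1 using `curl` (assuming basic authentication with your API key and secret, per the Fivetran REST API conventions):
+
+```
+curl -u {api-key}:{api-secret} https://api.fivetran.com/v1/transformation-projects
+```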
+3. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_transformation_project" "my_imported_fivetran_transformation_project" {
+
+}
+```
+
+4. Run the `terraform import` command:
+
+```
+terraform import fivetran_transformation_project.my_imported_fivetran_transformation_project {Transformation Project ID}
+```
+
+5. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_transformation_project.my_imported_fivetran_transformation_project'
+```
+
+6. Copy the values and paste them to your `.tf` configuration.
\ No newline at end of file
diff --git a/docs/resources/user.md b/docs/resources/user.md
new file mode 100644
index 00000000..e3b07b89
--- /dev/null
+++ b/docs/resources/user.md
@@ -0,0 +1,67 @@
+---
+page_title: "Resource: fivetran_user"
+---
+
+# Resource: fivetran_user
+
+This resource allows you to create, update, and delete users.
+
+## Example Usage
+
+```hcl
+resource "fivetran_user" "user" {
+    email = "user@email.address.com"
+    given_name = "John"
+    family_name = "Doe"
+    phone = "+353 00 0000 0000"
+}
+```
+
+
+## Schema
+
+### Required
+
+- `email` (String) The email address that the user has associated with their user profile.
+- `family_name` (String) The last name of the user.
+- `given_name` (String) The first name of the user.
+
+### Optional
+
+- `invited` (Boolean) The field indicates whether the user has been invited to your account.
+- `phone` (String) The phone number of the user.
+- `picture` (String) The user's avatar as a URL link (for example, 'http://mycompany.com/avatars/john_white.png') or base64 data URI (for example, 'data:image/png;base64,aHR0cDovL215Y29tcGFueS5jb20vYXZhdGFycy9qb2huX3doaXRlLnBuZw==')
+- `role` (String) The role that you would like to assign to the user.
+- `verified` (Boolean) The field indicates whether the user has verified their email address in the account creation process.
+
+### Read-Only
+
+- `created_at` (String) The timestamp that the user created their Fivetran account.
+- `id` (String) The unique identifier for the user within the Fivetran system.
+- `logged_in_at` (String) The last time the user logged into their Fivetran account.
+
+## Import
+
+1. To import an existing `fivetran_user` resource into your Terraform state, you need to get `user_id`.
+You can retrieve all users using the [fivetran_users data source](/docs/data-sources/users).
+
+2. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_user" "my_imported_fivetran_user" {
+
+}
+```
+
+3. Run the `terraform import` command:
+
+```
+terraform import fivetran_user.my_imported_fivetran_user {user_id}
+```
+
+4. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_user.my_imported_fivetran_user'
+```
+5. Copy the values and paste them to your `.tf` configuration.
\ No newline at end of file
diff --git a/docs/resources/user_connector_membership.md b/docs/resources/user_connector_membership.md
new file mode 100644
index 00000000..20fb8d7e
--- /dev/null
+++ b/docs/resources/user_connector_membership.md
@@ -0,0 +1,78 @@
+---
+page_title: "Resource: fivetran_user_connector_membership"
+---
+
+# Resource: fivetran_user_connector_membership
+
+This resource allows you to create, update, and delete connector memberships for a user.
+
+## Example Usage
+
+```hcl
+resource "fivetran_user_connector_membership" "test_user_connector_membership" {
+    provider = fivetran-provider
+
+    user_id = "test_user"
+
+    connector {
+        connector_id = "test_connector"
+        role = "Connector Administrator"
+    }
+
+    connector {
+        connector_id = "another_test_connector"
+        role = "Connector Administrator"
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `user_id` (String) The unique identifier for the user within your account.
+
+### Optional
+
+- `connector` (Block Set) (see [below for nested schema](#nestedblock--connector))
+
+
+### Nested Schema for `connector`
+
+Required:
+
+- `connector_id` (String) The connector unique identifier
+- `role` (String) The user's role that links the user and the connector
+
+Read-Only:
+
+- `created_at` (String) The date and time the membership was created
+
+## Import
+
+1. To import an existing `fivetran_user_connector_membership` resource into your Terraform state, you need to get `user_id` and `connector_id`.
+You can retrieve all users using the [fivetran_users data source](/docs/data-sources/users).
+
+2. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_user_connector_membership" "my_imported_fivetran_user_connector_membership" {
+
+}
+```
+
+3. Run the `terraform import` command:
+
+```
+terraform import fivetran_user_connector_membership.my_imported_fivetran_user_connector_membership {user_id}
+```
+
+4. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_user_connector_membership.my_imported_fivetran_user_connector_membership'
+```
+5. Copy the values and paste them to your `.tf` configuration.
diff --git a/docs/resources/user_group_membership.md b/docs/resources/user_group_membership.md
new file mode 100644
index 00000000..717a45f5
--- /dev/null
+++ b/docs/resources/user_group_membership.md
@@ -0,0 +1,78 @@
+---
+page_title: "Resource: fivetran_user_group_membership"
+---
+
+# Resource: fivetran_user_group_membership
+
+This resource allows you to create, update, and delete group memberships for a user.
+
+## Example Usage
+
+```hcl
+resource "fivetran_user_group_membership" "test_user_group_membership" {
+    provider = fivetran-provider
+
+    user_id = "test_user"
+
+    group {
+        group_id = "test_group"
+        role = "Destination Administrator"
+    }
+
+    group {
+        group_id = "another_test_group"
+        role = "Destination Administrator"
+    }
+}
+```
+
+
+## Schema
+
+### Required
+
+- `user_id` (String) The unique identifier for the user within your account.
+
+### Optional
+
+- `group` (Block Set) (see [below for nested schema](#nestedblock--group))
+
+
+### Nested Schema for `group`
+
+Required:
+
+- `group_id` (String) The group unique identifier
+- `role` (String) The user's role that links the user and the group
+
+Read-Only:
+
+- `created_at` (String) The date and time the membership was created
+
+## Import
+
+1. To import an existing `fivetran_user_group_membership` resource into your Terraform state, you need to get `user_id` and `group_id`.
+You can retrieve all users using the [fivetran_users data source](/docs/data-sources/users).
+
+2. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_user_group_membership" "my_imported_fivetran_user_group_membership" {
+
+}
+```
+
+3. Run the `terraform import` command:
+
+```
+terraform import fivetran_user_group_membership.my_imported_fivetran_user_group_membership {user_id}
+```
+
+4. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_user_group_membership.my_imported_fivetran_user_group_membership'
+```
+5. Copy the values and paste them to your `.tf` configuration.
diff --git a/docs/resources/webhook.md b/docs/resources/webhook.md
new file mode 100644
index 00000000..760c4271
--- /dev/null
+++ b/docs/resources/webhook.md
@@ -0,0 +1,69 @@
+---
+page_title: "Resource: fivetran_webhook"
+---
+
+# Resource: fivetran_webhook
+
+This resource allows you to create, update, and delete webhooks.
+
+## Example Usage
+
+```hcl
+resource "fivetran_webhook" "test_webhook" {
+    type = "group"
+    group_id = "group_id"
+    url = "https://your-host.your-domain/webhook"
+    secret = "password"
+    active = false
+    events = ["sync_start", "sync_end"]
+}
+```
+
+
+## Schema
+
+### Required
+
+- `active` (Boolean) Boolean, if set to true, webhooks are immediately sent in response to events
+- `events` (Set of String) The array of event types
+- `secret` (String, Sensitive) The secret string used for payload signing and masked in the response.
+- `type` (String) The webhook type (group, account) +- `url` (String) Your webhooks URL endpoint for your application + +### Optional + +- `group_id` (String) The group ID +- `run_tests` (Boolean) Specifies whether the setup tests should be run + +### Read-Only + +- `created_at` (String) The webhook creation timestamp +- `created_by` (String) The ID of the user who created the webhook. +- `id` (String) The webhook ID + +## Import + +1. To import an existing `fivetran_webhook` resource into your Terraform state, you need to get `webhook_id`. +You can retrieve all webhooks using the [fivetran_webhooks data source](/docs/data-sources/webhooks). + +2. Define an empty resource in your `.tf` configuration: + +```hcl +resource "fivetran_webhook" "my_imported_fivetran_webhook" { + +} +``` + +3. Run the `terraform import` command: + +``` +terraform import fivetran_webhook.my_imported_fivetran_webhook {webhook_id} +``` + +4. Use the `terraform state show` command to get the values from the state: + +``` +terraform state show 'fivetran_webhook.my_imported_fivetran_webhook' +``` +5. Copy the values and paste them to your `.tf` configuration. \ No newline at end of file diff --git a/templates/data-sources/transformation_project.md.tmpl b/templates/data-sources/transformation_project.md.tmpl new file mode 100644 index 00000000..4cec7e91 --- /dev/null +++ b/templates/data-sources/transformation_project.md.tmpl @@ -0,0 +1,17 @@ +--- +page_title: "Data Source: fivetran_transformation_project" +--- + +# Data Source: fivetran_transformation_project + +Returns transformation project details if a valid identifier was provided + +## Example Usage + +```hcl +data "fivetran_transformation_project" "test" { + id = "id" +} +``` + +{{ .SchemaMarkdown | trimspace }} \ No newline at end of file From dcb77b56b25371ed807f2053ebd5837be84dc5ae Mon Sep 17 00:00:00 2001 From: Aleksandr Boldyrev Date: Thu, 23 Jan 2025 21:10:29 +0100 Subject: [PATCH 05/13] transformations --- .../framework/core/model/transformation.go | 194 +++++++++ .../framework/core/model/transformations.go | 179 +++++++++ .../framework/core/schema/transformation.go | 225 +++++++++++ .../framework/datasources/transformation.go | 58 +++ .../datasources/transformation_test.go | 147 +++++++ .../framework/datasources/transformations.go | 83 ++++ .../datasources/transformations_test.go | 187 +++++++++ fivetran/framework/provider.go | 3 + .../framework/resources/transformation.go | 369 ++++++++++++++++++ .../resources/transformation_project.go | 38 +- .../resources/transformation_test.go | 335 ++++++++++++++++ templates/data-sources/transformation.md.tmpl | 17 + .../data-sources/transformations.md.tmpl | 16 + ...nsformation_private_git_deploy_key.md.tmpl | 60 +++ templates/resources/transformation.md.tmpl | 60 +++ .../resources/transformation_project.md.tmpl | 59 ++- 16 files changed, 1996 insertions(+), 34 deletions(-) create mode 100644 fivetran/framework/core/model/transformation.go create mode 100644 fivetran/framework/core/model/transformations.go create mode 100644 fivetran/framework/core/schema/transformation.go create mode 100644 fivetran/framework/datasources/transformation.go create mode 100644 fivetran/framework/datasources/transformation_test.go create mode 100644 fivetran/framework/datasources/transformations.go create mode 100644 fivetran/framework/datasources/transformations_test.go create mode 100644 fivetran/framework/resources/transformation.go create mode 100644 fivetran/framework/resources/transformation_test.go create mode 100644 
templates/data-sources/transformation.md.tmpl create mode 100644 templates/data-sources/transformations.md.tmpl create mode 100644 templates/guides/transformation_private_git_deploy_key.md.tmpl create mode 100644 templates/resources/transformation.md.tmpl diff --git a/fivetran/framework/core/model/transformation.go b/fivetran/framework/core/model/transformation.go new file mode 100644 index 00000000..afc83a08 --- /dev/null +++ b/fivetran/framework/core/model/transformation.go @@ -0,0 +1,194 @@ +package model + +import ( + "context" + + sdk "github.com/fivetran/go-fivetran/transformations" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +type Transformation struct { + Id types.String `tfsdk:"id"` + Status types.String `tfsdk:"status"` + ProjectType types.String `tfsdk:"type"` + Paused types.Bool `tfsdk:"paused"` + CreatedAt types.String `tfsdk:"created_at"` + CreatedById types.String `tfsdk:"created_by_id"` + OutputModelNames types.Set `tfsdk:"output_model_names"` + Schedule types.Object `tfsdk:"schedule"` + Config types.Object `tfsdk:"transformation_config"` +} + +var ( + stepAttrTypes = map[string]attr.Type{ + "name": types.StringType, + "command": types.StringType, + } + + stepSetAttrType = types.ObjectType{ + AttrTypes: stepAttrTypes, + } + + scheduleAttrs = map[string]attr.Type{ + "schedule_type": types.StringType, + "days_of_week": types.SetType{ElemType: types.StringType}, + "cron": types.SetType{ElemType: types.StringType}, + "connection_ids": types.SetType{ElemType: types.StringType}, + "interval": types.Int64Type, + "time_of_day": types.StringType, + "smart_syncing": types.BoolType, + } + + configAttrs = map[string]attr.Type{ + "project_id": types.StringType, + "package_name": types.StringType, + "name": types.StringType, + "excluded_models": types.SetType{ElemType: types.StringType}, + "connection_ids": types.SetType{ElemType: types.StringType}, + "steps": types.ListType{ElemType: types.ObjectType{AttrTypes: stepAttrTypes}}, + "upgrade_available": types.BoolType, + } +) + +func (d *Transformation) ReadFromResponse(ctx context.Context, resp sdk.TransformationResponse) { + d.Id = types.StringValue(resp.Data.Id) + d.Status = types.StringValue(resp.Data.Status) + d.ProjectType = types.StringValue(resp.Data.ProjectType) + d.CreatedAt = types.StringValue(resp.Data.CreatedAt) + d.CreatedById = types.StringValue(resp.Data.CreatedById) + d.Paused = types.BoolValue(resp.Data.Paused) + + if resp.Data.OutputModelNames != nil { + d.OutputModelNames = types.SetValueMust(types.StringType, stringListToAttrList(resp.Data.OutputModelNames)) + } else { + d.OutputModelNames = types.SetNull(types.StringType) + } + + scheduleAttrValues := map[string]attr.Value{} + scheduleAttrValues["smart_syncing"] = types.BoolValue(resp.Data.TransformationSchedule.SmartSyncing) + + if resp.Data.TransformationSchedule.ScheduleType == "INTERVAL" || resp.Data.TransformationSchedule.Interval > 0 { + scheduleAttrValues["interval"] = types.Int64Value(int64(resp.Data.TransformationSchedule.Interval)) + } else { + scheduleAttrValues["interval"] = types.Int64Null() + } + + if resp.Data.TransformationSchedule.TimeOfDay != "" { + scheduleAttrValues["time_of_day"] = types.StringValue(resp.Data.TransformationSchedule.TimeOfDay) + } else { + scheduleAttrValues["time_of_day"] = types.StringNull() + } + + if resp.Data.TransformationSchedule.ScheduleType != "" { + scheduleAttrValues["schedule_type"] = 
types.StringValue(resp.Data.TransformationSchedule.ScheduleType) + } else { + scheduleAttrValues["schedule_type"] = types.StringNull() + } + + if resp.Data.TransformationSchedule.Cron != nil { + vars := []attr.Value{} + for _, el := range resp.Data.TransformationSchedule.Cron { + vars = append(vars, types.StringValue(el)) + } + if len(vars) > 0 { + scheduleAttrValues["cron"] = types.SetValueMust(types.StringType, vars) + } else { + scheduleAttrValues["cron"] = types.SetNull(types.StringType) + } + } else { + scheduleAttrValues["cron"] = types.SetNull(types.StringType) + } + + if resp.Data.TransformationSchedule.ConnectionIds != nil { + vars := []attr.Value{} + for _, el := range resp.Data.TransformationSchedule.ConnectionIds { + vars = append(vars, types.StringValue(el)) + } + if len(vars) > 0 { + scheduleAttrValues["connection_ids"] = types.SetValueMust(types.StringType, vars) + } else { + scheduleAttrValues["connection_ids"] = types.SetNull(types.StringType) + } + } else { + scheduleAttrValues["connection_ids"] = types.SetNull(types.StringType) + } + + if resp.Data.TransformationSchedule.DaysOfWeek != nil { + vars := []attr.Value{} + for _, el := range resp.Data.TransformationSchedule.DaysOfWeek { + vars = append(vars, types.StringValue(el)) + } + if len(vars) > 0 { + scheduleAttrValues["days_of_week"] = types.SetValueMust(types.StringType, vars) + } else { + scheduleAttrValues["days_of_week"] = types.SetNull(types.StringType) + } + } else { + scheduleAttrValues["days_of_week"] = types.SetNull(types.StringType) + } + + d.Schedule = types.ObjectValueMust(scheduleAttrs, scheduleAttrValues) + + configAttrValues := map[string]attr.Value{} + configAttrValues["upgrade_available"] = types.BoolValue(resp.Data.TransformationConfig.UpgradeAvailable) + if resp.Data.TransformationConfig.ProjectId != "" { + configAttrValues["project_id"] = types.StringValue(resp.Data.TransformationConfig.ProjectId) + } else { + configAttrValues["project_id"] = types.StringNull() + } + + if resp.Data.TransformationConfig.PackageName != "" { + configAttrValues["package_name"] = types.StringValue(resp.Data.TransformationConfig.PackageName) + } else { + configAttrValues["package_name"] = types.StringNull() + } + + if resp.Data.TransformationConfig.Name != "" { + configAttrValues["name"] = types.StringValue(resp.Data.TransformationConfig.Name) + } else { + configAttrValues["name"] = types.StringNull() + } + + if resp.Data.TransformationConfig.ConnectionIds != nil { + vars := []attr.Value{} + for _, el := range resp.Data.TransformationConfig.ConnectionIds { + vars = append(vars, types.StringValue(el)) + } + if len(vars) > 0 { + configAttrValues["connection_ids"] = types.SetValueMust(types.StringType, vars) + } else { + configAttrValues["connection_ids"] = types.SetNull(types.StringType) + } + } else { + configAttrValues["connection_ids"] = types.SetNull(types.StringType) + } + + if resp.Data.TransformationConfig.ExcludedModels != nil { + vars := []attr.Value{} + for _, el := range resp.Data.TransformationConfig.ExcludedModels { + vars = append(vars, types.StringValue(el)) + } + if len(vars) > 0 { + configAttrValues["excluded_models"] = types.SetValueMust(types.StringType, vars) + } else { + configAttrValues["excluded_models"] = types.SetNull(types.StringType) + } + } else { + configAttrValues["excluded_models"] = types.SetNull(types.StringType) + } + + + subItems := []attr.Value{} + for _, sub := range resp.Data.TransformationConfig.Steps { + subItem := map[string]attr.Value{} + subItem["name"] = 
types.StringValue(sub.Name) + subItem["command"] = types.StringValue(sub.Command) + + subObjectValue, _ := types.ObjectValue(stepAttrTypes, subItem) + subItems = append(subItems, subObjectValue) + } + configAttrValues["steps"], _ = types.ListValue(stepSetAttrType, subItems) + + d.Config = types.ObjectValueMust(configAttrs, configAttrValues) +} diff --git a/fivetran/framework/core/model/transformations.go b/fivetran/framework/core/model/transformations.go new file mode 100644 index 00000000..edbc6bca --- /dev/null +++ b/fivetran/framework/core/model/transformations.go @@ -0,0 +1,179 @@ +package model + +import ( + "context" + + "github.com/fivetran/go-fivetran/transformations" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +type Transformations struct { + Transformations types.List `tfsdk:"transformations"` +} + +var ( + elemTypeAttrs = map[string]attr.Type{ + "id": types.StringType, + "status": types.StringType, + "type": types.StringType, + "created_at": types.StringType, + "created_by_id": types.StringType, + "paused": types.BoolType, + "output_model_names": types.SetType{ElemType: types.StringType}, + "schedule": types.ObjectType{AttrTypes: scheduleAttrs}, + "transformation_config": types.ObjectType{AttrTypes: configAttrs}, + } +) +func (d *Transformations) ReadFromResponse(ctx context.Context, resp transformations.TransformationsListResponse) { + if resp.Data.Items == nil { + d.Transformations = types.ListNull(types.ObjectType{AttrTypes: elemTypeAttrs}) + } else { + items := []attr.Value{} + for _, v := range resp.Data.Items { + item := map[string]attr.Value{} + item["id"] = types.StringValue(v.Id) + item["status"] = types.StringValue(v.Status) + item["type"] = types.StringValue(v.ProjectType) + item["created_at"] = types.StringValue(v.CreatedAt) + item["created_by_id"] = types.StringValue(v.CreatedById) + item["paused"] = types.BoolValue(v.Paused) + + if v.OutputModelNames != nil { + item["output_model_names"] = types.SetValueMust(types.StringType, stringListToAttrList(v.OutputModelNames)) + } else { + item["output_model_names"] = types.SetNull(types.StringType) + } + + scheduleAttrValues := map[string]attr.Value{} + scheduleAttrValues["smart_syncing"] = types.BoolValue(v.TransformationSchedule.SmartSyncing) + + if v.TransformationSchedule.ScheduleType == "INTERVAL" || v.TransformationSchedule.Interval > 0 { + scheduleAttrValues["interval"] = types.Int64Value(int64(v.TransformationSchedule.Interval)) + } else { + scheduleAttrValues["interval"] = types.Int64Null() + } + + if v.TransformationSchedule.TimeOfDay != "" { + scheduleAttrValues["time_of_day"] = types.StringValue(v.TransformationSchedule.TimeOfDay) + } else { + scheduleAttrValues["time_of_day"] = types.StringNull() + } + + if v.TransformationSchedule.ScheduleType != "" { + scheduleAttrValues["schedule_type"] = types.StringValue(v.TransformationSchedule.ScheduleType) + } else { + scheduleAttrValues["schedule_type"] = types.StringNull() + } + + if v.TransformationSchedule.Cron != nil { + vars := []attr.Value{} + for _, el := range v.TransformationSchedule.Cron { + vars = append(vars, types.StringValue(el)) + } + if len(vars) > 0 { + scheduleAttrValues["cron"] = types.SetValueMust(types.StringType, vars) + } else { + scheduleAttrValues["cron"] = types.SetNull(types.StringType) + } + } else { + scheduleAttrValues["cron"] = types.SetNull(types.StringType) + } + + if v.TransformationSchedule.ConnectionIds != nil { + vars := []attr.Value{} + for _, el := range 
v.TransformationSchedule.ConnectionIds { + vars = append(vars, types.StringValue(el)) + } + if len(vars) > 0 { + scheduleAttrValues["connection_ids"] = types.SetValueMust(types.StringType, vars) + } else { + scheduleAttrValues["connection_ids"] = types.SetNull(types.StringType) + } + } else { + scheduleAttrValues["connection_ids"] = types.SetNull(types.StringType) + } + + if v.TransformationSchedule.DaysOfWeek != nil { + vars := []attr.Value{} + for _, el := range v.TransformationSchedule.DaysOfWeek { + vars = append(vars, types.StringValue(el)) + } + if len(vars) > 0 { + scheduleAttrValues["days_of_week"] = types.SetValueMust(types.StringType, vars) + } else { + scheduleAttrValues["days_of_week"] = types.SetNull(types.StringType) + } + } else { + scheduleAttrValues["days_of_week"] = types.SetNull(types.StringType) + } + + item["schedule"] = types.ObjectValueMust(scheduleAttrs, scheduleAttrValues) + + configAttrValues := map[string]attr.Value{} + configAttrValues["upgrade_available"] = types.BoolValue(v.TransformationConfig.UpgradeAvailable) + if v.TransformationConfig.ProjectId != "" { + configAttrValues["project_id"] = types.StringValue(v.TransformationConfig.ProjectId) + } else { + configAttrValues["project_id"] = types.StringNull() + } + + if v.TransformationConfig.PackageName != "" { + configAttrValues["package_name"] = types.StringValue(v.TransformationConfig.PackageName) + } else { + configAttrValues["package_name"] = types.StringNull() + } + + if v.TransformationConfig.Name != "" { + configAttrValues["name"] = types.StringValue(v.TransformationConfig.Name) + } else { + configAttrValues["name"] = types.StringNull() + } + + if v.TransformationConfig.ConnectionIds != nil { + vars := []attr.Value{} + for _, el := range v.TransformationConfig.ConnectionIds { + vars = append(vars, types.StringValue(el)) + } + if len(vars) > 0 { + configAttrValues["connection_ids"] = types.SetValueMust(types.StringType, vars) + } else { + configAttrValues["connection_ids"] = types.SetNull(types.StringType) + } + } else { + configAttrValues["connection_ids"] = types.SetNull(types.StringType) + } + + if v.TransformationConfig.ExcludedModels != nil { + vars := []attr.Value{} + for _, el := range v.TransformationConfig.ExcludedModels { + vars = append(vars, types.StringValue(el)) + } + if len(vars) > 0 { + configAttrValues["excluded_models"] = types.SetValueMust(types.StringType, vars) + } else { + configAttrValues["excluded_models"] = types.SetNull(types.StringType) + } + } else { + configAttrValues["excluded_models"] = types.SetNull(types.StringType) + } + + subItems := []attr.Value{} + for _, sub := range v.TransformationConfig.Steps { + subItem := map[string]attr.Value{} + subItem["name"] = types.StringValue(sub.Name) + subItem["command"] = types.StringValue(sub.Command) + + subObjectValue, _ := types.ObjectValue(stepAttrTypes, subItem) + subItems = append(subItems, subObjectValue) + } + configAttrValues["steps"], _ = types.ListValue(stepSetAttrType, subItems) + + item["transformation_config"] = types.ObjectValueMust(configAttrs, configAttrValues) + + objectValue, _ := types.ObjectValue(elemTypeAttrs, item) + items = append(items, objectValue) + } + d.Transformations, _ = types.ListValue(types.ObjectType{AttrTypes: elemTypeAttrs}, items) + } +} \ No newline at end of file diff --git a/fivetran/framework/core/schema/transformation.go b/fivetran/framework/core/schema/transformation.go new file mode 100644 index 00000000..05b8a7c4 --- /dev/null +++ b/fivetran/framework/core/schema/transformation.go @@ -0,0 
+1,225 @@ +package schema + +import ( + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + datasourceSchema "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + resourceSchema "github.com/hashicorp/terraform-plugin-framework/resource/schema" +) + +func TransformationResource() resourceSchema.Schema { + return resourceSchema.Schema{ + Attributes: transformationSchema().GetResourceSchema(), + Blocks: transformationResourceBlocks(), + } +} + +func TransformationDatasource() datasourceSchema.Schema { + return datasourceSchema.Schema{ + Attributes: transformationSchema().GetDatasourceSchema(), + Blocks: transformationDatasourceBlocks(), + } +} + +func TransformationListDatasource() datasourceSchema.Schema { + return datasourceSchema.Schema{ + Blocks: map[string]datasourceSchema.Block{ + "transformations": datasourceSchema.ListNestedBlock{ + NestedObject: datasourceSchema.NestedBlockObject{ + Attributes: transformationSchema().GetDatasourceSchema(), + Blocks: transformationDatasourceBlocks(), + }, + }, + }, + } +} + +func transformationSchema() core.Schema { + return core.Schema{ + Fields: map[string]core.SchemaField{ + "id": { + ValueType: core.String, + IsId: true, + Description: "The unique identifier for the transformation within the Fivetran system.", + }, + "paused": { + ValueType: core.Boolean, + Description: "The field indicating whether the transformation will be set into the paused state. By default, the value is false.", + }, + "type": { + ValueType: core.String, + Description: "The transformation type.", + }, + "created_at": { + ValueType: core.String, + Readonly: true, + Description: "The timestamp of when the transformation was created in your account.", + }, + "created_by_id": { + ValueType: core.String, + Readonly: true, + Description: "The unique identifier for the User within the Fivetran system who created the transformation.", + }, + "status": { + ValueType: core.String, + Readonly: true, + Description: "The status of the transformation (NOT_READY, READY, ERROR).", + }, + "output_model_names": { + ValueType: core.StringsSet, + Readonly: true, + Description: "The names of the related output models.", + }, + }, + } +} + +func transformationScheduleSchema() core.Schema { + return core.Schema{ + Fields: map[string]core.SchemaField{ + "schedule_type": { + ValueType: core.String, + Description: "The type of the schedule to run the transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL. For the INTEGRATED schedule type, interval and time_of_day values are ignored and only the days_of_week parameter values are taken into account (but may be empty or null). For the TIME_OF_DAY schedule type, the interval parameter value is ignored and the time_of_day value is taken into account along with the days_of_week value. For the INTERVAL schedule type, the time_of_day value is ignored and the interval parameter value is taken into account along with the days_of_week value.", + }, + "days_of_week": { + ValueType: core.StringsSet, + Description: "The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY.", + }, + "interval": { + ValueType: core.Integer, + Description: "The time interval in minutes between subsequent transformation runs.", + }, + "time_of_day": { + ValueType: core.String, + Description: `The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00"`, + }, + "connection_ids": { + ValueType: core.StringsSet, + Description: "The identifiers of related connections.", + }, + "smart_syncing": { + ValueType: core.Boolean, + Description: "The boolean flag that enables the Smart Syncing schedule.", + }, + "cron": { + ValueType: core.StringsSet, + Description: "The cron schedule: a list of CRON strings.", + }, + }, + } +} + +func transformationConfigDatasourceSchema() map[string]datasourceSchema.Attribute { + return map[string]datasourceSchema.Attribute{ + "project_id": datasourceSchema.StringAttribute{ + Computed: true, + Description: "The unique identifier for the dbt Core project within the Fivetran system", + }, + "name": datasourceSchema.StringAttribute{ + Computed: true, + Description: "The transformation name", + }, + "package_name": datasourceSchema.StringAttribute{ + Computed: true, + Description: `The Quickstart transformation package name`, + }, + "connection_ids": datasourceSchema.SetAttribute{ + ElementType: basetypes.StringType{}, + Computed: true, + Description: "The list of the connection identifiers to be used for the integrated schedule. Also used to identify package_name automatically if package_name was not specified", + }, + "excluded_models": datasourceSchema.SetAttribute{ + ElementType: basetypes.StringType{}, + Computed: true, + Description: "The list of excluded output model names", + }, + "upgrade_available": datasourceSchema.BoolAttribute{ + Computed: true, + Description: "The boolean flag indicating that a newer version is available for the transformation package", + }, + "steps": datasourceSchema.ListNestedAttribute{ + Computed: true, + NestedObject: datasourceSchema.NestedAttributeObject{ + Attributes: map[string]datasourceSchema.Attribute{ + "name": datasourceSchema.StringAttribute{ + Computed: true, + Description: "The step name", + }, + "command": datasourceSchema.StringAttribute{ + Computed: true, + Description: "The dbt command in the transformation step", + }, + }, + }, + }, + } +} + +func transformationConfigResourceSchema() map[string]resourceSchema.Attribute { + return map[string]resourceSchema.Attribute{ + "project_id": resourceSchema.StringAttribute{ + Optional: true, + Description: "The unique identifier for the dbt Core project within the Fivetran system", + }, + "name": resourceSchema.StringAttribute{ + Optional: true, + Description: "The transformation name", + }, + "package_name": resourceSchema.StringAttribute{ + Optional: true, + Description: `The Quickstart transformation package name`, + }, + "connection_ids": resourceSchema.SetAttribute{ + Optional: true, + ElementType: basetypes.StringType{}, + Description: "The list of the connection identifiers to be used for the integrated schedule. 
Also used to identify package_name automatically if package_name was not specified", + }, + "excluded_models": resourceSchema.SetAttribute{ + Optional: true, + ElementType: basetypes.StringType{}, + Description: "The list of excluded output model names", + }, + "upgrade_available": resourceSchema.BoolAttribute{ + Computed: true, + Optional: true, + Description: "The boolean flag indicating that a newer version is available for the transformation package", + }, + "steps": resourceSchema.ListNestedAttribute{ + Optional: true, + NestedObject: resourceSchema.NestedAttributeObject{ + Attributes: map[string]resourceSchema.Attribute{ + "name": resourceSchema.StringAttribute{ + Optional: true, + Description: "The step name", + }, + "command": resourceSchema.StringAttribute{ + Optional: true, + Description: "The dbt command in the transformation step", + }, + }, + }, + }, + } +} + +func transformationResourceBlocks() map[string]resourceSchema.Block { + return map[string]resourceSchema.Block{ + "schedule": resourceSchema.SingleNestedBlock{ + Attributes: transformationScheduleSchema().GetResourceSchema(), + }, + "transformation_config": resourceSchema.SingleNestedBlock{ + Attributes: transformationConfigResourceSchema(), + }, + } +} + +func transformationDatasourceBlocks() map[string]datasourceSchema.Block { + return map[string]datasourceSchema.Block{ + "schedule": datasourceSchema.SingleNestedBlock{ + Attributes: transformationScheduleSchema().GetDatasourceSchema(), + }, + "transformation_config": datasourceSchema.SingleNestedBlock{ + Attributes: transformationConfigDatasourceSchema(), + }, + } +} diff --git a/fivetran/framework/datasources/transformation.go b/fivetran/framework/datasources/transformation.go new file mode 100644 index 00000000..8c51ef38 --- /dev/null +++ b/fivetran/framework/datasources/transformation.go @@ -0,0 +1,58 @@ +package datasources + +import ( + "context" + "fmt" + + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core" + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/model" + fivetranSchema "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/schema" + "github.com/hashicorp/terraform-plugin-framework/datasource" +) + +func Transformation() datasource.DataSource { + return &transformation{} +} + +// Ensure the implementation satisfies the desired interfaces. +var _ datasource.DataSourceWithConfigure = &transformation{} + +type transformation struct { + core.ProviderDatasource +} + +func (d *transformation) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = "fivetran_transformation" +} + +func (d *transformation) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = fivetranSchema.TransformationDatasource() +} + +func (d *transformation) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + if d.GetClient() == nil { + resp.Diagnostics.AddError( + "Unconfigured Fivetran Client", + "Please report this issue to the provider developers.", + ) + + return + } + + var data model.Transformation + + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) 
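+ + // Stop if the config could not be read into the model; the resource implementations in this patch apply the same guard after reading plan/state. + if resp.Diagnostics.HasError() { + return + }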
+ + readResponse, err := d.GetClient().NewTransformationDetails().TransformationId(data.Id.ValueString()).Do(ctx) + + if err != nil { + resp.Diagnostics.AddError( + "Transformation Read Error.", + fmt.Sprintf("%v; code: %v; message: %v", err, readResponse.Code, readResponse.Message), + ) + return + } + + data.ReadFromResponse(ctx, readResponse) + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} \ No newline at end of file diff --git a/fivetran/framework/datasources/transformation_test.go b/fivetran/framework/datasources/transformation_test.go new file mode 100644 index 00000000..4ff9ebc9 --- /dev/null +++ b/fivetran/framework/datasources/transformation_test.go @@ -0,0 +1,147 @@ +package datasources_test + +import ( + "net/http" + "testing" + + "github.com/fivetran/go-fivetran/tests/mock" + tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" +) + +var ( + transformationDataSourceMockGetHandler *mock.Handler + transformationDataSourceMockData map[string]interface{} +) + +func setupMockClienttransformationDataSourceMappingTest(t *testing.T) { + transformationResponse := ` +{ + "id": "transformation_id", + "status": "status", + "schedule": { + "cron": [ + "cron1", + "cron2" + ], + "interval": 60, + "smart_syncing": true, + "connection_ids": [ + "connection_id1", + "connection_id2" + ], + "schedule_type": "schedule_type", + "days_of_week": [ + "days_of_week1", + "days_of_week2" + ], + "time_of_day": "time_of_day" + }, + "type": "type", + "paused": true, + "created_at": "created_at", + "output_model_names": [ + "output_model_name1", + "output_model_name2" + ], + "created_by_id": "created_by_id", + "transformation_config": { + "project_id": "project_id", + "name": "name", + "steps": [ + { + "name": "name1", + "command": "command1" + }, + { + "name": "name2", + "command": "command2" + } + ], + "package_name": "package_name", + "connection_ids": [ + "connection_id1", + "connection_id2" + ], + "excluded_models": [ + "excluded_model1", + "excluded_model2" + ], + "upgrade_available": true + } + }` + tfmock.MockClient().Reset() + + transformationDataSourceMockGetHandler = tfmock.MockClient().When(http.MethodGet, "/v1/transformations/transformation_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + transformationDataSourceMockData = tfmock.CreateMapFromJsonString(t, transformationResponse) + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", transformationDataSourceMockData), nil + }, + ) +} + +func TestDataSourcetransformationMappingMock(t *testing.T) { + // NOTE: the config is totally inconsistent and contains all possible values for mapping test + step1 := resource.TestStep{ + Config: ` + data "fivetran_transformation" "transformation" { + provider = fivetran-provider + id = "transformation_id" + }`, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationDataSourceMockGetHandler.Interactions, 1) + tfmock.AssertNotEmpty(t, transformationDataSourceMockData) + return nil + }, + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "id", "transformation_id"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "status", "status"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "created_at", "created_at"), + 
resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "created_by_id", "created_by_id"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "type", "type"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "paused", "true"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "output_model_names.0", "output_model_name1"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "output_model_names.1", "output_model_name2"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.project_id", "project_id"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.name", "name"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.steps.0.name", "name1"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.steps.0.command", "command1"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.steps.1.name", "name2"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.steps.1.command", "command2"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.package_name", "package_name"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.connection_ids.0", "connection_id1"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.connection_ids.1", "connection_id2"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.excluded_models.0", "excluded_model1"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.excluded_models.1", "excluded_model2"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.upgrade_available", "true"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.smart_syncing", "true"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.interval", "60"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.schedule_type", "schedule_type"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.cron.0", "cron1"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.cron.1", "cron2"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.connection_ids.0", "connection_id1"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.connection_ids.1", "connection_id2"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.days_of_week.0", "days_of_week1"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.days_of_week.1", "days_of_week2"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.time_of_day", "time_of_day"), + ), + } + + resource.Test( + t, + resource.TestCase{ + PreCheck: func() { + setupMockClienttransformationDataSourceMappingTest(t) + }, + ProtoV6ProviderFactories: 
tfmock.ProtoV6ProviderFactories, + CheckDestroy: func(s *terraform.State) error { + return nil + }, + Steps: []resource.TestStep{ + step1, + }, + }, + ) +} diff --git a/fivetran/framework/datasources/transformations.go b/fivetran/framework/datasources/transformations.go new file mode 100644 index 00000000..5927f56a --- /dev/null +++ b/fivetran/framework/datasources/transformations.go @@ -0,0 +1,83 @@ +package datasources + +import ( + "context" + "fmt" + + sdk "github.com/fivetran/go-fivetran/transformations" + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core" + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/model" + fivetranSchema "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/schema" + "github.com/hashicorp/terraform-plugin-framework/datasource" +) + +func Transformations() datasource.DataSource { + return &transformations{} +} + +// Ensure the implementation satisfies the desired interfaces. +var _ datasource.DataSourceWithConfigure = &transformations{} + +type transformations struct { + core.ProviderDatasource +} + +func (d *transformations) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = "fivetran_transformations" +} + +func (d *transformations) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = fivetranSchema.TransformationListDatasource() +} + +func (d *transformations) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + if d.GetClient() == nil { + resp.Diagnostics.AddError( + "Unconfigured Fivetran Client", + "Please report this issue to the provider developers.", + ) + + return + } + + var data model.Transformations + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + + // Page through the list endpoint, accumulating items until no next_cursor is returned. + var respNextCursor string + var listResponse sdk.TransformationsListResponse + limit := 1000 + + for { + var err error + var tmpResp sdk.TransformationsListResponse + svc := d.GetClient().NewTransformationsList() + + if respNextCursor == "" { + tmpResp, err = svc.Limit(limit).Do(ctx) + } else { + tmpResp, err = svc.Limit(limit).Cursor(respNextCursor).Do(ctx) + } + + if err != nil { + resp.Diagnostics.AddError( + "Read error.", + fmt.Sprintf("%v; code: %v", err, tmpResp.Code), + ) + return + } + + listResponse.Data.Items = append(listResponse.Data.Items, tmpResp.Data.Items...) + + if tmpResp.Data.NextCursor == "" { + break + } + + respNextCursor = tmpResp.Data.NextCursor + } + + data.ReadFromResponse(ctx, listResponse) + + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+} diff --git a/fivetran/framework/datasources/transformations_test.go b/fivetran/framework/datasources/transformations_test.go new file mode 100644 index 00000000..8f2780f3 --- /dev/null +++ b/fivetran/framework/datasources/transformations_test.go @@ -0,0 +1,187 @@ +package datasources_test + +import ( + "net/http" + "testing" + + "github.com/fivetran/go-fivetran/tests/mock" + tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" +) + +var ( + transformationsDataSourceMockGetHandler *mock.Handler + transformationsDataSourceMockData map[string]interface{} +) + +const ( + transformationsMappingResponse = ` + { + "items": [ + { + "id": "transformation_id1", + "status": "status1", + "schedule": { + "cron": [ + "cron1" + ], + "interval": 601, + "smart_syncing": true, + "connection_ids": [ + "connection_id1" + ], + "schedule_type": "schedule_type1", + "days_of_week": [ + "days_of_week01", + "days_of_week11" + ], + "time_of_day": "time_of_day1" + }, + "type": "type1", + "paused": true, + "created_at": "created_at1", + "output_model_names": [ + "output_model_name1" + ], + "created_by_id": "created_by_id1", + "transformation_config": { + "project_id": "project_id1", + "name": "name1", + "steps": [ + { + "name": "name01", + "command": "command01" + }, + { + "name": "name02", + "command": "command02" + } + ] + } + }, +{ + "id": "transformation_id2", + "status": "status2", + "schedule": { + "cron": [ + "cron2" + ], + "interval": 602, + "smart_syncing": true, + "connection_ids": [ + "connection_id2" + ], + "schedule_type": "schedule_type2", + "days_of_week": [ + "days_of_week02", + "days_of_week12" + ], + "time_of_day": "time_of_day2" + }, + "type": "type2", + "paused": true, + "created_at": "created_at2", + "output_model_names": [ + "output_model_name2" + ], + "created_by_id": "created_by_id2", + "transformation_config": { + "package_name": "package_name2", + "connection_ids": [ + "connection_id2" + ], + "excluded_models": [ + "excluded_model2" + ], + "upgrade_available": true + } + } + ], + "next_cursor": null + } + ` +) + +func setupMockClienttransformationsDataSourceConfigMapping(t *testing.T) { + tfmock.MockClient().Reset() + transformationsDataSourceMockGetHandler = tfmock.MockClient().When(http.MethodGet, "/v1/transformations").ThenCall( + func(req *http.Request) (*http.Response, error) { + transformationsDataSourceMockData = tfmock.CreateMapFromJsonString(t, transformationsMappingResponse) + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", transformationsDataSourceMockData), nil + }, + ) +} + +func TestDataSourcetransformationsMappingMock(t *testing.T) { + step1 := resource.TestStep{ + Config: ` + data "fivetran_transformations" "transformation" { + provider = fivetran-provider + }`, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationsDataSourceMockGetHandler.Interactions, 1) + return nil + }, + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.id", "transformation_id1"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.status", "status1"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.created_at", "created_at1"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", 
"transformations.0.created_by_id", "created_by_id1"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.type", "type1"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.paused", "true"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.output_model_names.0", "output_model_name1"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.transformation_config.project_id", "project_id1"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.transformation_config.name", "name1"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.transformation_config.steps.0.name", "name01"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.transformation_config.steps.0.command", "command01"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.transformation_config.steps.1.name", "name02"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.transformation_config.steps.1.command", "command02"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.schedule.smart_syncing", "true"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.schedule.interval", "601"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.schedule.schedule_type", "schedule_type1"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.schedule.cron.0", "cron1"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.schedule.connection_ids.0", "connection_id1"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.schedule.days_of_week.0", "days_of_week01"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.schedule.days_of_week.1", "days_of_week11"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.0.schedule.time_of_day", "time_of_day1"), + + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.id", "transformation_id2"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.status", "status2"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.created_at", "created_at2"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.created_by_id", "created_by_id2"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.type", "type2"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.paused", "true"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.output_model_names.0", "output_model_name2"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.transformation_config.package_name", "package_name2"), + 
resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.transformation_config.connection_ids.0", "connection_id2"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.transformation_config.excluded_models.0", "excluded_model2"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.transformation_config.upgrade_available", "true"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.schedule.smart_syncing", "true"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.schedule.interval", "602"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.schedule.schedule_type", "schedule_type2"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.schedule.cron.0", "cron2"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.schedule.connection_ids.0", "connection_id2"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.schedule.days_of_week.0", "days_of_week02"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.schedule.days_of_week.1", "days_of_week12"), + resource.TestCheckResourceAttr("data.fivetran_transformations.transformation", "transformations.1.schedule.time_of_day", "time_of_day2"), + ), + } + + resource.Test( + t, + resource.TestCase{ + PreCheck: func() { + setupMockClienttransformationsDataSourceConfigMapping(t) + }, + ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, + CheckDestroy: func(s *terraform.State) error { + return nil + }, + Steps: []resource.TestStep{ + step1, + }, + }, + ) +} diff --git a/fivetran/framework/provider.go b/fivetran/framework/provider.go index 46b4640c..808741ce 100644 --- a/fivetran/framework/provider.go +++ b/fivetran/framework/provider.go @@ -124,6 +124,7 @@ func (p *fivetranProvider) Resources(ctx context.Context) []func() resource.Reso resources.DbtGitProjectConfig, resources.PrivateLink, resources.TransformationProject, + resources.Transformation, } } @@ -174,5 +175,7 @@ func (p *fivetranProvider) DataSources(ctx context.Context) []func() datasource. 
datasources.QuickstartPackages, datasources.TransformationProject, datasources.TransformationProjects, + datasources.Transformation, + datasources.Transformations, } } diff --git a/fivetran/framework/resources/transformation.go b/fivetran/framework/resources/transformation.go new file mode 100644 index 00000000..087820d5 --- /dev/null +++ b/fivetran/framework/resources/transformation.go @@ -0,0 +1,369 @@ +package resources + +import ( + "context" + "fmt" + + "github.com/fivetran/go-fivetran" + "github.com/fivetran/go-fivetran/transformations" + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core" + "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/model" + fivetranSchema "github.com/fivetran/terraform-provider-fivetran/fivetran/framework/core/schema" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +func Transformation() resource.Resource { + return &transformation{} +} + +type transformation struct { + core.ProviderResource +} + +// Ensure the implementation satisfies the desired interfaces. +var _ resource.ResourceWithConfigure = &transformation{} +var _ resource.ResourceWithImportState = &transformation{} + +func (r *transformation) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = "fivetran_transformation" +} + +func (r *transformation) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = fivetranSchema.TransformationResource() +} + +func (r *transformation) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) +} + +func (r *transformation) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + if r.GetClient() == nil { + resp.Diagnostics.AddError( + "Unconfigured Fivetran Client", + "Please report this issue to the provider developers.", + ) + + return + } + + var data model.Transformation + // Read Terraform plan data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) 
+ + if resp.Diagnostics.HasError() { + return + } + + client := r.GetClient() + svc := client.NewTransformationCreate() + svc.ProjectType(data.ProjectType.ValueString()) + svc.Paused(data.Paused.ValueBool()) + + if !data.Config.IsNull() && !data.Config.IsUnknown() { + config := fivetran.NewTransformationConfig() + configAttributes := data.Config.Attributes() + if !configAttributes["project_id"].(basetypes.StringValue).IsNull() && !configAttributes["project_id"].(basetypes.StringValue).IsUnknown() { + config.ProjectId(configAttributes["project_id"].(basetypes.StringValue).ValueString()) + } + if !configAttributes["name"].(basetypes.StringValue).IsNull() && !configAttributes["name"].(basetypes.StringValue).IsUnknown() { + config.Name(configAttributes["name"].(basetypes.StringValue).ValueString()) + } + if !configAttributes["package_name"].(basetypes.StringValue).IsNull() && !configAttributes["package_name"].(basetypes.StringValue).IsUnknown() { + config.PackageName(configAttributes["package_name"].(basetypes.StringValue).ValueString()) + } + + if !configAttributes["connection_ids"].IsUnknown() && !configAttributes["connection_ids"].IsNull() { + evars := []string{} + for _, ev := range configAttributes["connection_ids"].(basetypes.SetValue).Elements() { + evars = append(evars, ev.(basetypes.StringValue).ValueString()) + } + config.ConnectionIds(evars) + } + + if !configAttributes["excluded_models"].IsUnknown() && !configAttributes["excluded_models"].IsNull() { + evars := []string{} + for _, ev := range configAttributes["excluded_models"].(basetypes.SetValue).Elements() { + evars = append(evars, ev.(basetypes.StringValue).ValueString()) + } + config.ExcludedModels(evars) + } + + if !configAttributes["steps"].IsUnknown() && !configAttributes["steps"].IsNull() { + evars := []transformations.TransformationStep{} + for _, ev := range configAttributes["steps"].(basetypes.SetValue).Elements() { + if element, ok := ev.(basetypes.ObjectValue); ok { + step := transformations.TransformationStep{} + step.Name = element.Attributes()["name"].(basetypes.StringValue).ValueString() + step.Command = element.Attributes()["command"].(basetypes.StringValue).ValueString() + evars = append(evars, step) + } + } + config.Steps(evars) + } + + svc.TransformationConfig(config) + } + + if !data.Schedule.IsNull() && !data.Schedule.IsUnknown() { + schedule := fivetran.NewTransformationSchedule() + scheduleAttributes := data.Schedule.Attributes() + + if !scheduleAttributes["time_of_day"].(basetypes.StringValue).IsNull() && !scheduleAttributes["time_of_day"].(basetypes.StringValue).IsUnknown() { + schedule.TimeOfDay(scheduleAttributes["time_of_day"].(basetypes.StringValue).ValueString()) + } + if !scheduleAttributes["schedule_type"].(basetypes.StringValue).IsNull() && !scheduleAttributes["schedule_type"].(basetypes.StringValue).IsUnknown() { + schedule.ScheduleType(scheduleAttributes["schedule_type"].(basetypes.StringValue).ValueString()) + } + if !scheduleAttributes["interval"].(basetypes.Int64Value).IsNull() && !scheduleAttributes["interval"].(basetypes.Int64Value).IsUnknown() { + schedule.Interval(int(scheduleAttributes["interval"].(basetypes.Int64Value).ValueInt64())) + } + if !scheduleAttributes["smart_syncing"].(basetypes.BoolValue).IsNull() && !scheduleAttributes["smart_syncing"].(basetypes.BoolValue).IsUnknown() { + schedule.SmartSyncing(scheduleAttributes["smart_syncing"].(basetypes.BoolValue).ValueBool()) + } + + if !scheduleAttributes["connection_ids"].IsUnknown() && !scheduleAttributes["connection_ids"].IsNull() 
{ + evars := []string{} + for _, ev := range scheduleAttributes["connection_ids"].(basetypes.SetValue).Elements() { + evars = append(evars, ev.(basetypes.StringValue).ValueString()) + } + schedule.ConnectionIds(evars) + } + + if !scheduleAttributes["days_of_week"].IsUnknown() && !scheduleAttributes["days_of_week"].IsNull() { + evars := []string{} + for _, ev := range scheduleAttributes["days_of_week"].(basetypes.SetValue).Elements() { + evars = append(evars, ev.(basetypes.StringValue).ValueString()) + } + schedule.DaysOfWeek(evars) + } + + if !scheduleAttributes["cron"].IsUnknown() && !scheduleAttributes["cron"].IsNull() { + evars := []string{} + for _, ev := range scheduleAttributes["cron"].(basetypes.SetValue).Elements() { + evars = append(evars, ev.(basetypes.StringValue).ValueString()) + } + schedule.Cron(evars) + } + + svc.TransformationSchedule(schedule) + } + + createResponse, err := svc.Do(ctx) + if err != nil { + resp.Diagnostics.AddError( + "Unable to Create Transformation Resource.", + fmt.Sprintf("%v; code: %v; message: %v", err, createResponse.Code, createResponse.Message), + ) + + return + } + + data.ReadFromResponse(ctx, createResponse) + + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + if resp.Diagnostics.HasError() { + // Do cleanup on error + deleteResponse, err := client.NewTransformationDelete().TransformationId(createResponse.Data.Id).Do(ctx) + if err != nil { + resp.Diagnostics.AddError( + "Unable to Cleanup Transformation Resource.", + fmt.Sprintf("%v; code: %v; message: %v", err, deleteResponse.Code, deleteResponse.Message), + ) + } + } +} + +func (r *transformation) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + if r.GetClient() == nil { + resp.Diagnostics.AddError( + "Unconfigured Fivetran Client", + "Please report this issue to the provider developers.", + ) + + return + } + + var data model.Transformation + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + + if resp.Diagnostics.HasError() { + return + } + + readResponse, err := r.GetClient().NewTransformationDetails().TransformationId(data.Id.ValueString()).Do(ctx) + + if err != nil { + resp.Diagnostics.AddError( + "Unable to Read Transformation Resource.", + fmt.Sprintf("%v; code: %v; message: %v", err, readResponse.Code, readResponse.Message), + ) + return + } + + data.ReadFromResponse(ctx, readResponse) + + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +func (r *transformation) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + if r.GetClient() == nil { + resp.Diagnostics.AddError( + "Unconfigured Fivetran Client", + "Please report this issue to the provider developers.", + ) + + return + } + + var state model.Transformation + var plan model.Transformation + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) 
+ + if resp.Diagnostics.HasError() { + return + } + + svc := r.GetClient().NewTransformationUpdate() + // Target the transformation being updated; like the details and delete services used elsewhere in this file, the update service needs the resource ID. + svc.TransformationId(state.Id.ValueString()) + svc.Paused(plan.Paused.ValueBool()) + + if !plan.Config.IsNull() && !plan.Config.IsUnknown() { + config := fivetran.NewTransformationConfig() + configAttributes := plan.Config.Attributes() + if !configAttributes["project_id"].(basetypes.StringValue).IsNull() && !configAttributes["project_id"].(basetypes.StringValue).IsUnknown() { + config.ProjectId(configAttributes["project_id"].(basetypes.StringValue).ValueString()) + } + if !configAttributes["name"].(basetypes.StringValue).IsNull() && !configAttributes["name"].(basetypes.StringValue).IsUnknown() { + config.Name(configAttributes["name"].(basetypes.StringValue).ValueString()) + } + if !configAttributes["package_name"].(basetypes.StringValue).IsNull() && !configAttributes["package_name"].(basetypes.StringValue).IsUnknown() { + config.PackageName(configAttributes["package_name"].(basetypes.StringValue).ValueString()) + } + + if !configAttributes["connection_ids"].IsUnknown() && !configAttributes["connection_ids"].IsNull() { + evars := []string{} + for _, ev := range configAttributes["connection_ids"].(basetypes.SetValue).Elements() { + evars = append(evars, ev.(basetypes.StringValue).ValueString()) + } + config.ConnectionIds(evars) + } + + if !configAttributes["excluded_models"].IsUnknown() && !configAttributes["excluded_models"].IsNull() { + evars := []string{} + for _, ev := range configAttributes["excluded_models"].(basetypes.SetValue).Elements() { + evars = append(evars, ev.(basetypes.StringValue).ValueString()) + } + config.ExcludedModels(evars) + } + + if !configAttributes["steps"].IsUnknown() && !configAttributes["steps"].IsNull() { + evars := []transformations.TransformationStep{} + for _, ev := range configAttributes["steps"].(basetypes.SetValue).Elements() { + if element, ok := ev.(basetypes.ObjectValue); ok { + var step transformations.TransformationStep + step.Name = element.Attributes()["name"].(basetypes.StringValue).ValueString() + step.Command = element.Attributes()["command"].(basetypes.StringValue).ValueString() + evars = append(evars, step) + } + } + config.Steps(evars) + } + + svc.TransformationConfig(config) + } + + if !plan.Schedule.IsNull() && !plan.Schedule.IsUnknown() { + schedule := fivetran.NewTransformationSchedule() + scheduleAttributes := plan.Schedule.Attributes() + + if !scheduleAttributes["time_of_day"].(basetypes.StringValue).IsNull() && !scheduleAttributes["time_of_day"].(basetypes.StringValue).IsUnknown() { + schedule.TimeOfDay(scheduleAttributes["time_of_day"].(basetypes.StringValue).ValueString()) + } + if !scheduleAttributes["schedule_type"].(basetypes.StringValue).IsNull() && !scheduleAttributes["schedule_type"].(basetypes.StringValue).IsUnknown() { + schedule.ScheduleType(scheduleAttributes["schedule_type"].(basetypes.StringValue).ValueString()) + } + if !scheduleAttributes["interval"].(basetypes.Int64Value).IsNull() && !scheduleAttributes["interval"].(basetypes.Int64Value).IsUnknown() { + schedule.Interval(int(scheduleAttributes["interval"].(basetypes.Int64Value).ValueInt64())) + } + if !scheduleAttributes["smart_syncing"].(basetypes.BoolValue).IsNull() && !scheduleAttributes["smart_syncing"].(basetypes.BoolValue).IsUnknown() { + schedule.SmartSyncing(scheduleAttributes["smart_syncing"].(basetypes.BoolValue).ValueBool()) + } + + if !scheduleAttributes["connection_ids"].IsUnknown() && !scheduleAttributes["connection_ids"].IsNull() { + evars := []string{} + for _, ev := range 
scheduleAttributes["connection_ids"].(basetypes.SetValue).Elements() { + evars = append(evars, ev.(basetypes.StringValue).ValueString()) + } + schedule.ConnectionIds(evars) + } + + if !scheduleAttributes["days_of_week"].IsUnknown() && !scheduleAttributes["days_of_week"].IsNull() { + evars := []string{} + for _, ev := range scheduleAttributes["days_of_week"].(basetypes.SetValue).Elements() { + evars = append(evars, ev.(basetypes.StringValue).ValueString()) + } + schedule.DaysOfWeek(evars) + } + + if !scheduleAttributes["cron"].IsUnknown() && !scheduleAttributes["cron"].IsNull() { + evars := []string{} + for _, ev := range scheduleAttributes["cron"].(basetypes.SetValue).Elements() { + evars = append(evars, ev.(basetypes.StringValue).ValueString()) + } + schedule.Cron(evars) + } + + svc.TransformationSchedule(schedule) + } + + updateResponse, err := svc.Do(ctx) + + if err != nil { + resp.Diagnostics.AddError( + "Unable to Update Transformation Resource.", + fmt.Sprintf("%v; code: %v; message: %v", err, updateResponse.Code, updateResponse.Message), + ) + return + } + + plan.ReadFromResponse(ctx, updateResponse) + + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) +} + +func (r *transformation) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + if r.GetClient() == nil { + resp.Diagnostics.AddError( + "Unconfigured Fivetran Client", + "Please report this issue to the provider developers.", + ) + + return + } + + var data model.Transformation + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + + if resp.Diagnostics.HasError() { + return + } + + deleteResponse, err := r.GetClient().NewTransformationDelete().TransformationId(data.Id.ValueString()).Do(ctx) + + if err != nil { + resp.Diagnostics.AddError( + "Unable to Delete transformation Resource.", + fmt.Sprintf("%v; code: %v; message: %v", err, deleteResponse.Code, deleteResponse.Message), + ) + return + } +} diff --git a/fivetran/framework/resources/transformation_project.go b/fivetran/framework/resources/transformation_project.go index fe6ad477..9850ed59 100644 --- a/fivetran/framework/resources/transformation_project.go +++ b/fivetran/framework/resources/transformation_project.go @@ -86,39 +86,11 @@ func (r *transformationProject) Create(ctx context.Context, req resource.CreateR projectResponse, err := svc.Do(ctx) if err != nil { - if projectResponse.Code != "DbtProjectExists" { - resp.Diagnostics.AddError( - "Unable to Create dbt Project Resource.", - fmt.Sprintf("%v; code: %v; message: %v", err, projectResponse.Code, projectResponse.Message), - ) - - return - } else { - // try to recover Id - projectListResponse, err := r.GetClient().NewTransformationProjectsList().Do(ctx) - - if err != nil { - resp.Diagnostics.AddError( - "Unable to Read Transformation Project Resource.", - fmt.Sprintf("%v; code: %v; message: %v", err, projectResponse.Code, projectResponse.Message), - ) - return - } - - for _, v := range projectListResponse.Data.Items { - if v.GroupId == data.GroupId.ValueString() { - projectResponse, err := r.GetClient().NewTransformationProjectDetails().ProjectId(v.Id).Do(ctx) - - if err != nil { - resp.Diagnostics.AddError( - "Unable to Read Transformation Project Resource.", - fmt.Sprintf("%v; code: %v; message: %v", err, projectResponse.Code, projectResponse.Message), - ) - return - } - } - } - } + resp.Diagnostics.AddError( + "Unable to Create Transformation Project Resource.", + fmt.Sprintf("%v; code: %v; message: %v", err, 
projectResponse.Code, projectResponse.Message), + ) + return } data.ReadFromResponse(ctx, projectResponse) diff --git a/fivetran/framework/resources/transformation_test.go b/fivetran/framework/resources/transformation_test.go new file mode 100644 index 00000000..13e9d9a6 --- /dev/null +++ b/fivetran/framework/resources/transformation_test.go @@ -0,0 +1,335 @@ +package resources_test + +import ( + "net/http" + "testing" + "time" + + "github.com/fivetran/go-fivetran/tests/mock" + tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" +) + +var ( + transformationPostHandler *mock.Handler + transformationPatchHandler *mock.Handler + transformationDeleteHandler *mock.Handler + transformationData map[string]interface{} +) + +func onPostTransformation(t *testing.T, req *http.Request) (*http.Response, error) { + tfmock.AssertEmpty(t, transformationData) + + body := tfmock.RequestBodyToJson(t, req) + + // Check the request + tfmock.AssertEqual(t, len(body), 4) + + tfmock.AssertKeyExistsAndHasValue(t, body, "dbt_model_id", "dbt_model_id") + tfmock.AssertKeyExistsAndHasValue(t, body, "paused", false) + tfmock.AssertKeyExistsAndHasValue(t, body, "run_tests", false) + + requestSchedule := tfmock.AssertKeyExists(t, body, "schedule").(map[string]interface{}) + + tfmock.AssertKeyExistsAndHasValue(t, requestSchedule, "schedule_type", "TIME_OF_DAY") + tfmock.AssertKeyExistsAndHasValue(t, requestSchedule, "time_of_day", "12:00") + + requestScheduleDays := tfmock.AssertKeyExists(t, requestSchedule, "days_of_week").([]interface{}) + + expectedDays := make([]interface{}, 0) + + expectedDays = append(expectedDays, "MONDAY") + //expectedDays = append(expectedDays, "SATURDAY") + + tfmock.AssertArrayItems(t, requestScheduleDays, expectedDays) + + // Add response fields + body["id"] = "transformation_id" + body["dbt_project_id"] = "dbt_project_id" + body["output_model_name"] = "output_model_name" + + connectorIds := make([]string, 0) + body["connector_ids"] = append(connectorIds, "connector_id") + + modelIds := make([]string, 0) + body["model_ids"] = append(modelIds, "model_id") + + body["created_at"] = time.Now().Format("2006-01-02T15:04:05.000000Z") + + transformationData = body + + response := tfmock.FivetranSuccessResponse(t, req, http.StatusCreated, "", transformationData) + + return response, nil +} + +func onPatchTransformation(t *testing.T, req *http.Request, updateIteration int) (*http.Response, error) { + tfmock.AssertNotEmpty(t, transformationData) + + body := tfmock.RequestBodyToJson(t, req) + + if updateIteration == 0 { + // Check the request + tfmock.AssertEqual(t, len(body), 3) + tfmock.AssertKeyExistsAndHasValue(t, body, "paused", true) + tfmock.AssertKeyExistsAndHasValue(t, body, "run_tests", true) + requestSchedule := tfmock.AssertKeyExists(t, body, "schedule").(map[string]interface{}) + + requestScheduleDays := tfmock.AssertKeyExists(t, requestSchedule, "days_of_week").([]interface{}) + expectedDays := make([]interface{}, 0) + expectedDays = append(expectedDays, "MONDAY") + expectedDays = append(expectedDays, "SATURDAY") + + tfmock.AssertArrayItems(t, requestScheduleDays, expectedDays) + + // Update saved values + for k, v := range body { + if k != "schedule" { + transformationData[k] = v + } else { + stateSchedule := transformationData[k].(map[string]interface{}) + stateSchedule["days_of_week"] = expectedDays + } + } + + response := tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Transformation has been updated", transformationData) + return response, nil + } + + if updateIteration == 1 { + // Check the request + tfmock.AssertEqual(t, len(body), 1) + schedule := tfmock.AssertKeyExists(t, body, "schedule").(map[string]interface{}) + tfmock.AssertKeyExistsAndHasValue(t, schedule, "schedule_type", "INTERVAL") + tfmock.AssertKeyExistsAndHasValue(t, schedule, "interval", float64(60)) + + // Update saved values + for k, v := range body { + transformationData[k] = v + } + + response := tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Transformation has been updated", transformationData) + return response, nil + } + + response := tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "", transformationData) + + return response, nil +} + +func setupMockClientTransformationResource(t *testing.T) { + tfmock.MockClient().Reset() + transformationData = nil + updateCounter := 0 + + transformationPostHandler = tfmock.MockClient().When(http.MethodPost, "/v1/dbt/transformations").ThenCall( + func(req *http.Request) (*http.Response, error) { + return onPostTransformation(t, req) + }, + ) + + tfmock.MockClient().When(http.MethodGet, "/v1/dbt/transformations/transformation_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + tfmock.AssertNotEmpty(t, transformationData) + response := tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "", transformationData) + return response, nil + }, + ) + + transformationPatchHandler = tfmock.MockClient().When(http.MethodPatch, "/v1/dbt/transformations/transformation_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + response, err := onPatchTransformation(t, req, updateCounter) + updateCounter++ + return response, err + }, + ) + + transformationDeleteHandler = tfmock.MockClient().When(http.MethodDelete, "/v1/dbt/transformations/transformation_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + tfmock.AssertNotEmpty(t, transformationData) + transformationData = nil + response := tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "", nil) + return response, nil + }, + ) + + projectResponse := `{ + "id": "project_id", + "group_id": "group_id", + "dbt_version": "dbt_version", + "created_at": "created_at", + "created_by_id": "created_by_id", + "public_key": "public_key", + "default_schema": "default_schema", + "target_name": "target_name", + "environment_vars": ["environment_var"], + "threads": 1, + "type": "GIT", + "project_config": { + "git_remote_url": "git_remote_url", + "git_branch": "git_branch", + "folder_path": "folder_path" + }, + "status": "READY" + } + ` + + tfmock.MockClient().When(http.MethodGet, "/v1/dbt/projects/dbt_project_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", tfmock.CreateMapFromJsonString(t, projectResponse)), nil + }, + ) + + modelsMappingResponse := ` + { + "items":[ + { + "id": "dbt_model_id", + "model_name": "dbt_model_name", + "scheduled": true + } + ], + "next_cursor": null + } + ` + + modelMappingResponse := ` + { + "id": "dbt_model_id", + "model_name": "dbt_model_name", + "scheduled": true + } + ` + + tfmock.MockClient().When(http.MethodGet, "/v1/dbt/models/dbt_model_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", tfmock.CreateMapFromJsonString(t, modelMappingResponse)), nil + }, + ) + + tfmock.MockClient().When(http.MethodGet, 
"/v1/dbt/models").ThenCall( + func(req *http.Request) (*http.Response, error) { + project_id := req.URL.Query().Get("project_id") + tfmock.AssertEqual(t, project_id, "dbt_project_id") + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", tfmock.CreateMapFromJsonString(t, modelsMappingResponse)), nil + }, + ) + +} + +func TestResourceTransformationMock(t *testing.T) { + step1 := resource.TestStep{ + Config: ` + resource "fivetran_dbt_transformation" "transformation" { + provider = fivetran-provider + + dbt_project_id = "dbt_project_id" + dbt_model_name = "dbt_model_name" + run_tests = "false" + paused = "false" + schedule { + schedule_type = "TIME_OF_DAY" + time_of_day = "12:00" + days_of_week = ["MONDAY"] + } + } + `, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationPostHandler.Interactions, 1) + tfmock.AssertNotEmpty(t, transformationData) + return nil + }, + resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "id", "transformation_id"), + resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "dbt_model_id", "dbt_model_id"), + resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "run_tests", "false"), + resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "paused", "false"), + resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "schedule.schedule_type", "TIME_OF_DAY"), + resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "schedule.time_of_day", "12:00"), + resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "schedule.days_of_week.0", "MONDAY"), + ), + } + + // Update run_tests and paused fields, update days of week in schedule + step2 := resource.TestStep{ + Config: ` + resource "fivetran_dbt_transformation" "transformation" { + provider = fivetran-provider + + dbt_project_id = "dbt_project_id" + dbt_model_name = "dbt_model_name" + run_tests = "true" + paused = "true" + schedule { + schedule_type = "TIME_OF_DAY" + time_of_day = "12:00" + days_of_week = ["MONDAY", "SATURDAY"] + } + } + `, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationPatchHandler.Interactions, 1) + tfmock.AssertNotEmpty(t, transformationData) + return nil + }, + + resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "run_tests", "true"), + resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "paused", "true"), + resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "schedule.days_of_week.1", "SATURDAY"), + ), + } + + // Update schedule_type and paused fields + step3 := resource.TestStep{ + Config: ` + resource "fivetran_dbt_transformation" "transformation" { + provider = fivetran-provider + + dbt_project_id = "dbt_project_id" + dbt_model_name = "dbt_model_name" + run_tests = "true" + paused = "true" + schedule { + schedule_type = "INTERVAL" + interval = 60 + } + } + `, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationPatchHandler.Interactions, 2) + tfmock.AssertNotEmpty(t, transformationData) + return nil + }, + + resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "schedule.schedule_type", "INTERVAL"), + resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "schedule.interval", "60"), + ), + } 
+ + resource.Test( + t, + resource.TestCase{ + PreCheck: func() { + setupMockClientTransformationResource(t) + }, + ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, + CheckDestroy: func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationDeleteHandler.Interactions, 1) + tfmock.AssertEmpty(t, transformationData) + return nil + }, + + Steps: []resource.TestStep{ + step1, + step2, + step3, + }, + }, + ) +} diff --git a/templates/data-sources/transformation.md.tmpl b/templates/data-sources/transformation.md.tmpl new file mode 100644 index 00000000..2e0ba9d4 --- /dev/null +++ b/templates/data-sources/transformation.md.tmpl @@ -0,0 +1,17 @@ +--- +page_title: "Data Source: fivetran_transformation" +--- + +# Data Source: fivetran_transformation + +Returns transformation details if a valid identifier was provided. + +## Example Usage + +```hcl +data "fivetran_transformation" "test" { + id = "id" +} +``` + +{{ .SchemaMarkdown | trimspace }} \ No newline at end of file diff --git a/templates/data-sources/transformations.md.tmpl b/templates/data-sources/transformations.md.tmpl new file mode 100644 index 00000000..2ab6dca3 --- /dev/null +++ b/templates/data-sources/transformations.md.tmpl @@ -0,0 +1,16 @@ +--- +page_title: "Data Source: fivetran_transformations" +--- + +# Data Source: fivetran_transformations + +Returns a list of all transformations available via API within your Fivetran account. + +## Example Usage + +```hcl +data "fivetran_transformations" "test" { +} +``` + +{{ .SchemaMarkdown | trimspace }} \ No newline at end of file diff --git a/templates/guides/transformation_private_git_deploy_key.md.tmpl b/templates/guides/transformation_private_git_deploy_key.md.tmpl new file mode 100644 index 00000000..8fe4b2ef --- /dev/null +++ b/templates/guides/transformation_private_git_deploy_key.md.tmpl @@ -0,0 +1,60 @@ +--- +page_title: "Transformation Project Setup With Git Private Repo" +subcategory: "Getting Started" +--- + +# How to set up a Transformation Project with a private Git repo + +To use a private Git repository for your Transformation Project, you have to grant Fivetran access to that repository. +To do that, add a deploy key to the repository.
+To get the SSH public key from Fivetran, create a `fivetran_transformation_project` resource:
+
+```hcl
+resource "fivetran_group" "my_group" {
+    name = "My_Group"
+}
+
+resource "fivetran_transformation_project" "project" {
+    provider = fivetran-provider
+    group_id = "group_id"
+    type = "DBT_GIT"
+    run_tests = true
+
+    project_config {
+        git_remote_url = "git_remote_url"
+        git_branch = "git_branch"
+        folder_path = "folder_path"
+        dbt_version = "dbt_version"
+        default_schema = "default_schema"
+        threads = 0
+        target_name = "target_name"
+        environment_vars = ["environment_var"]
+    }
+}
+```
+
+Then set up the Transformation Project public key (the `public_key` field of the created resource) as a deploy key in your repository, using one of the following:
+
+[GitHub Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/repository_deploy_key):
+```hcl
+resource "github_repository_deploy_key" "example_repository_deploy_key" {
+    title = "Repository test key"
+    repository = "repo-owner/repo-name"
+    key = fivetran_transformation_project.project.project_config.public_key
+    read_only = true
+}
+```
+
+or
+
+[Bitbucket Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/DrFaust92/bitbucket/latest/docs/resources/deploy_key):
+```hcl
+resource "bitbucket_deploy_key" "test" {
+    workspace = "repo-owner"
+    repository = "repo-name"
+    key = fivetran_transformation_project.project.project_config.public_key
+    label = "Repository test key"
+}
+```
+
+Because this setup relies on third-party providers, make sure that access to the repositories is granted correctly and that the providers themselves are configured correctly.
diff --git a/templates/resources/transformation.md.tmpl b/templates/resources/transformation.md.tmpl
new file mode 100644
index 00000000..275f60dc
--- /dev/null
+++ b/templates/resources/transformation.md.tmpl
@@ -0,0 +1,60 @@
+---
+page_title: "Resource: fivetran_transformation"
+---
+
+# Resource: fivetran_transformation
+
+Resource is in ALPHA state.
+
+This resource allows you to add, manage and delete transformations in your account.
+
+## Example Usage
+
+```hcl
+resource "fivetran_transformation" "transformation" {
+    provider = fivetran-provider
+
+    type = "DBT_CORE"
+    paused = true
+
+    schedule {
+        schedule_type = "TIME_OF_DAY"
+        time_of_day = "11:00"
+        days_of_week = ["MONDAY", "FRIDAY"]
+    }
+
+    transformation_config {
+        project_id = "project_id"
+        name = "name"
+        steps = [
+            {
+                name = "step_name"
+                command = "dbt run"
+            }
+        ]
+    }
+}
+```
+
+{{ .SchemaMarkdown | trimspace }}
+
+## Import
+
+1. To import an existing `fivetran_transformation` resource into your Terraform state, you need to get the **Transformation ID** via the API call `GET https://api.fivetran.com/v1/transformations` to retrieve the available transformations.
+2. Fetch the transformation details for the particular `transformation-id` using `GET https://api.fivetran.com/v1/transformations/{transformation-id}` to ensure that this is the transformation you want to import.
+3. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_transformation" "my_imported_fivetran_transformation" {
+
+}
+```
+
+4. Run the `terraform import` command:
+
+```
+terraform import fivetran_transformation.my_imported_fivetran_transformation {Transformation ID}
+```
+
+5. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_transformation.my_imported_fivetran_transformation'
+```
+
+6. Copy the values and paste them to your `.tf` configuration.
diff --git a/templates/resources/transformation_project.md.tmpl b/templates/resources/transformation_project.md.tmpl
index 4f6fe49c..50d69ca7 100644
--- a/templates/resources/transformation_project.md.tmpl
+++ b/templates/resources/transformation_project.md.tmpl
@@ -56,4 +56,61 @@ terraform import fivetran_transformation_project.my_imported_fivetran_transforma
 terraform state show 'fivetran_transformation_project.my_imported_fivetran_transformation_project'
 ```
 
-5. Copy the values and paste them to your `.tf` configuration.
\ No newline at end of file
+5. Copy the values and paste them to your `.tf` configuration.
+
+
+## How to set up a Transformation Project with a private Git repo
+
+To use a private Git repository for your Transformation Project, you have to grant Fivetran access to that repository.
+To do that, add a deploy key to your repository.
+To get the SSH public key from Fivetran, create a `fivetran_transformation_project` resource:
+
+```hcl
+resource "fivetran_group" "my_group" {
+    name = "My_Group"
+}
+
+resource "fivetran_transformation_project" "project" {
+    provider = fivetran-provider
+    group_id = "group_id"
+    type = "DBT_GIT"
+    run_tests = true
+
+    project_config {
+        git_remote_url = "git_remote_url"
+        git_branch = "git_branch"
+        folder_path = "folder_path"
+        dbt_version = "dbt_version"
+        default_schema = "default_schema"
+        threads = 0
+        target_name = "target_name"
+        environment_vars = ["environment_var"]
+    }
+}
+```
+
+Then set up the Transformation Project public key (the `public_key` field of the created resource) as a deploy key in your repository, using one of the following:
+
+[GitHub Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/repository_deploy_key):
+```hcl
+resource "github_repository_deploy_key" "example_repository_deploy_key" {
+    title = "Repository test key"
+    repository = "repo-owner/repo-name"
+    key = fivetran_transformation_project.project.project_config.public_key
+    read_only = true
+}
+```
+
+or
+
+[Bitbucket Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/DrFaust92/bitbucket/latest/docs/resources/deploy_key):
+```hcl
+resource "bitbucket_deploy_key" "test" {
+    workspace = "repo-owner"
+    repository = "repo-name"
+    key = fivetran_transformation_project.project.project_config.public_key
+    label = "Repository test key"
+}
+```
+
+Because this setup relies on third-party providers, make sure that access to the repositories is granted correctly and that the providers themselves are configured correctly.
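+
+For example, a minimal GitHub provider configuration could look like the following sketch (the `github_token` variable and its use here are illustrative assumptions; see the GitHub provider documentation for the full set of options):
+
+```hcl
+variable "github_token" {
+  type      = string
+  sensitive = true
+}
+
+provider "github" {
+  # Token with permission to manage deploy keys on the target repository.
+  # Illustrative only; supply your own credentials and owner.
+  token = var.github_token
+  owner = "repo-owner"
+}
+```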
\ No newline at end of file From 417b6900042371c37bd586e2d5f32f4cfbd4a7a6 Mon Sep 17 00:00:00 2001 From: Aleksandr Boldyrev Date: Thu, 23 Jan 2025 22:58:16 +0100 Subject: [PATCH 06/13] transformations --- docs/data-sources/transformation.md | 68 ++ docs/data-sources/transformations.md | 74 ++ .../transformation_private_git_deploy_key.md | 60 ++ docs/resources/transformation.md | 111 +++ docs/resources/transformation_project.md | 59 +- .../framework/core/model/transformation.go | 24 +- .../framework/core/model/transformations.go | 24 +- fivetran/framework/core/schema/dbt_models.go | 1 + fivetran/framework/core/schema/dbt_project.go | 2 + .../framework/core/schema/dbt_projects.go | 1 + .../core/schema/dbt_transformation.go | 2 + .../datasources/transformation_test.go | 104 +-- .../framework/resources/transformation.go | 4 +- .../resources/transformation_test.go | 706 ++++++++++-------- 14 files changed, 846 insertions(+), 394 deletions(-) create mode 100644 docs/data-sources/transformation.md create mode 100644 docs/data-sources/transformations.md create mode 100644 docs/guides/transformation_private_git_deploy_key.md create mode 100644 docs/resources/transformation.md diff --git a/docs/data-sources/transformation.md b/docs/data-sources/transformation.md new file mode 100644 index 00000000..6c255496 --- /dev/null +++ b/docs/data-sources/transformation.md @@ -0,0 +1,68 @@ +--- +page_title: "Data Source: fivetran_transformation" +--- + +# Data Source: fivetran_transformation + +Returns transformation details if a valid identifier was provided + +## Example Usage + +```hcl +data "fivetran_transformation" "test" { + id = "id" +} +``` + + +## Schema + +### Required + +- `id` (String) The unique identifier for the dbt Transformation within the Fivetran system. + +### Read-Only + +- `created_at` (String) The timestamp of when the transformation was created in your account. +- `created_by_id` (String) The unique identifier for the User within the Fivetran system who created the transformation. +- `output_model_names` (Set of String) Identifiers of related models. +- `paused` (Boolean) The field indicating whether the transformation will be set into the paused state. By default, the value is false. +- `schedule` (Block, Read-only) (see [below for nested schema](#nestedblock--schedule)) +- `status` (String) Status of transformation Project (NOT_READY, READY, ERROR). +- `transformation_config` (Block, Read-only) (see [below for nested schema](#nestedblock--transformation_config)) +- `type` (String) Transformation type. + + +### Nested Schema for `schedule` + +Read-Only: + +- `connection_ids` (Set of String) Identifiers of related connectors. +- `cron` (Set of String) Cron schedule: list of CRON strings. +- `days_of_week` (Set of String) The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. +- `interval` (Number) The time interval in minutes between subsequent transformation runs. +- `schedule_type` (String) The type of the schedule to run the dbt Transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL. For INTEGRATED schedule type, interval and time_of_day values are ignored and only the days_of_week parameter values are taken into account (but may be empty or null). For TIME_OF_DAY schedule type, the interval parameter value is ignored and the time_of_day values is taken into account along with days_of_week value. 
For INTERVAL schedule type, time_of_day value is ignored and the interval parameter value is taken into account along with days_of_week value. +- `smart_syncing` (Boolean) The boolean flag that enables the Smart Syncing schedule +- `time_of_day` (String) The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00" + + + +### Nested Schema for `transformation_config` + +Read-Only: + +- `connection_ids` (Set of String) The list of the connection identifiers to be used for the integrated schedule. Also used to identify package_name automatically if package_name was not specified +- `excluded_models` (Set of String) The list of excluded output model names +- `name` (String) The transformation name +- `package_name` (String) The Quickstart transformation package name +- `project_id` (String) The unique identifier for the dbt Core project within the Fivetran system +- `steps` (Attributes List) (see [below for nested schema](#nestedatt--transformation_config--steps)) +- `upgrade_available` (Boolean) The boolean flag indicating that a newer version is available for the transformation package + + +### Nested Schema for `transformation_config.steps` + +Read-Only: + +- `command` (String) The dbt command in the transformation step +- `name` (String) The step name \ No newline at end of file diff --git a/docs/data-sources/transformations.md b/docs/data-sources/transformations.md new file mode 100644 index 00000000..f0863657 --- /dev/null +++ b/docs/data-sources/transformations.md @@ -0,0 +1,74 @@ +--- +page_title: "Data Source: fivetran_transformations" +--- + +# Data Source: fivetran_transformations + +Returns a list of all transformations available via API within your Fivetran account. + +## Example Usage + +```hcl +data "fivetran_transformations" "test" { +} +``` + + +## Schema + +### Optional + +- `transformations` (Block List) (see [below for nested schema](#nestedblock--transformations)) + + +### Nested Schema for `transformations` + +Required: + +- `id` (String) The unique identifier for the dbt Transformation within the Fivetran system. + +Read-Only: + +- `created_at` (String) The timestamp of when the transformation was created in your account. +- `created_by_id` (String) The unique identifier for the User within the Fivetran system who created the transformation. +- `output_model_names` (Set of String) Identifiers of related models. +- `paused` (Boolean) The field indicating whether the transformation will be set into the paused state. By default, the value is false. +- `schedule` (Block, Read-only) (see [below for nested schema](#nestedblock--transformations--schedule)) +- `status` (String) Status of transformation Project (NOT_READY, READY, ERROR). +- `transformation_config` (Block, Read-only) (see [below for nested schema](#nestedblock--transformations--transformation_config)) +- `type` (String) Transformation type. + + +### Nested Schema for `transformations.schedule` + +Read-Only: + +- `connection_ids` (Set of String) Identifiers of related connectors. +- `cron` (Set of String) Cron schedule: list of CRON strings. +- `days_of_week` (Set of String) The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. 
+- `interval` (Number) The time interval in minutes between subsequent transformation runs.
+- `schedule_type` (String) The type of the schedule to run the dbt Transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL. For INTEGRATED schedule type, interval and time_of_day values are ignored and only the days_of_week parameter values are taken into account (but may be empty or null). For TIME_OF_DAY schedule type, the interval parameter value is ignored and the time_of_day value is taken into account along with the days_of_week value. For INTERVAL schedule type, the time_of_day value is ignored and the interval parameter value is taken into account along with the days_of_week value.
+- `smart_syncing` (Boolean) The boolean flag that enables the Smart Syncing schedule
+- `time_of_day` (String) The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00"
+
+
+### Nested Schema for `transformations.transformation_config`
+
+Read-Only:
+
+- `connection_ids` (Set of String) The list of the connection identifiers to be used for the integrated schedule. Also used to identify package_name automatically if package_name was not specified
+- `excluded_models` (Set of String) The list of excluded output model names
+- `name` (String) The transformation name
+- `package_name` (String) The Quickstart transformation package name
+- `project_id` (String) The unique identifier for the dbt Core project within the Fivetran system
+- `steps` (Attributes List) (see [below for nested schema](#nestedatt--transformations--transformation_config--steps))
+- `upgrade_available` (Boolean) The boolean flag indicating that a newer version is available for the transformation package
+
+
+### Nested Schema for `transformations.transformation_config.steps`
+
+Read-Only:
+
+- `command` (String) The dbt command in the transformation step
+- `name` (String) The step name
\ No newline at end of file
diff --git a/docs/guides/transformation_private_git_deploy_key.md b/docs/guides/transformation_private_git_deploy_key.md
new file mode 100644
index 00000000..8fe4b2ef
--- /dev/null
+++ b/docs/guides/transformation_private_git_deploy_key.md
@@ -0,0 +1,60 @@
+---
+page_title: "Transformation Project Setup With Git Private Repo"
+subcategory: "Getting Started"
+---
+
+# How to set up a Transformation Project with a private Git repo
+
+To use a private Git repository for your Transformation Project, you have to grant Fivetran access to that repository.
+To do that, add a deploy key to your repository.
+To get the SSH public key from Fivetran, create a `fivetran_transformation_project` resource:
+
+```hcl
+resource "fivetran_group" "my_group" {
+    name = "My_Group"
+}
+
+resource "fivetran_transformation_project" "project" {
+    provider = fivetran-provider
+    group_id = "group_id"
+    type = "DBT_GIT"
+    run_tests = true
+
+    project_config {
+        git_remote_url = "git_remote_url"
+        git_branch = "git_branch"
+        folder_path = "folder_path"
+        dbt_version = "dbt_version"
+        default_schema = "default_schema"
+        threads = 0
+        target_name = "target_name"
+        environment_vars = ["environment_var"]
+    }
+}
+```
+
+Then set up the Transformation Project public key (the `public_key` field of the created resource) as a deploy key in your repository, using one of the following:
+
+[GitHub Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/repository_deploy_key):
+```hcl
+resource "github_repository_deploy_key" "example_repository_deploy_key" {
+    title = "Repository test key"
+    repository = "repo-owner/repo-name"
+    key = fivetran_transformation_project.project.project_config.public_key
+    read_only = true
+}
+```
+
+or
+
+[Bitbucket Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/DrFaust92/bitbucket/latest/docs/resources/deploy_key):
+```hcl
+resource "bitbucket_deploy_key" "test" {
+    workspace = "repo-owner"
+    repository = "repo-name"
+    key = fivetran_transformation_project.project.project_config.public_key
+    label = "Repository test key"
+}
+```
+
+Because this setup relies on third-party providers, make sure that access to the repositories is granted correctly and that the providers themselves are configured correctly.
diff --git a/docs/resources/transformation.md b/docs/resources/transformation.md
new file mode 100644
index 00000000..ac16ddd6
--- /dev/null
+++ b/docs/resources/transformation.md
@@ -0,0 +1,111 @@
+---
+page_title: "Resource: fivetran_transformation"
+---
+
+# Resource: fivetran_transformation
+
+Resource is in ALPHA state.
+
+This resource allows you to add, manage and delete transformations in your account.
+
+## Example Usage
+
+```hcl
+resource "fivetran_transformation" "transformation" {
+    provider = fivetran-provider
+
+    type = "DBT_CORE"
+    paused = true
+
+    schedule {
+        schedule_type = "TIME_OF_DAY"
+        time_of_day = "11:00"
+        days_of_week = ["MONDAY", "FRIDAY"]
+    }
+
+    transformation_config {
+        project_id = "project_id"
+        name = "name"
+        steps = [
+            {
+                name = "step_name"
+                command = "dbt run"
+            }
+        ]
+    }
+}
+```
+
+
+## Schema
+
+### Optional
+
+- `paused` (Boolean) The field indicating whether the transformation will be set into the paused state. By default, the value is false.
+- `schedule` (Block, Optional) (see [below for nested schema](#nestedblock--schedule))
+- `transformation_config` (Block, Optional) (see [below for nested schema](#nestedblock--transformation_config))
+- `type` (String) Transformation type.
+
+### Read-Only
+
+- `created_at` (String) The timestamp of when the transformation was created in your account.
+- `created_by_id` (String) The unique identifier for the User within the Fivetran system who created the transformation.
+- `id` (String) The unique identifier for the dbt Transformation within the Fivetran system.
+- `output_model_names` (Set of String) Identifiers of related models.
+- `status` (String) Status of transformation Project (NOT_READY, READY, ERROR).
+
+
+### Nested Schema for `schedule`
+
+Optional:
+
+- `connection_ids` (Set of String) Identifiers of related connectors.
+- `cron` (Set of String) Cron schedule: list of CRON strings.
+- `days_of_week` (Set of String) The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY.
+- `interval` (Number) The time interval in minutes between subsequent transformation runs.
+- `schedule_type` (String) The type of the schedule to run the dbt Transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL. For INTEGRATED schedule type, interval and time_of_day values are ignored and only the days_of_week parameter values are taken into account (but may be empty or null). For TIME_OF_DAY schedule type, the interval parameter value is ignored and the time_of_day value is taken into account along with the days_of_week value. For INTERVAL schedule type, the time_of_day value is ignored and the interval parameter value is taken into account along with the days_of_week value.
+- `smart_syncing` (Boolean) The boolean flag that enables the Smart Syncing schedule
+- `time_of_day` (String) The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00"
+
+
+### Nested Schema for `transformation_config`
+
+Optional:
+
+- `connection_ids` (Set of String) The list of the connection identifiers to be used for the integrated schedule. Also used to identify package_name automatically if package_name was not specified
+- `excluded_models` (Set of String) The list of excluded output model names
+- `name` (String) The transformation name
+- `package_name` (String) The Quickstart transformation package name
+- `project_id` (String) The unique identifier for the dbt Core project within the Fivetran system
+- `steps` (Attributes List) (see [below for nested schema](#nestedatt--transformation_config--steps))
+- `upgrade_available` (Boolean) The boolean flag indicating that a newer version is available for the transformation package
+
+
+### Nested Schema for `transformation_config.steps`
+
+Optional:
+
+- `command` (String) The dbt command in the transformation step
+- `name` (String) The step name
+
+## Import
+
+1. To import an existing `fivetran_transformation` resource into your Terraform state, you need to get the **Transformation ID** via the API call `GET https://api.fivetran.com/v1/transformations` to retrieve the available transformations.
+2. Fetch the transformation details for the particular `transformation-id` using `GET https://api.fivetran.com/v1/transformations/{transformation-id}` to ensure that this is the transformation you want to import.
+3. Define an empty resource in your `.tf` configuration:
+
+```hcl
+resource "fivetran_transformation" "my_imported_fivetran_transformation" {
+
+}
+```
+
+4. Run the `terraform import` command:
+
+```
+terraform import fivetran_transformation.my_imported_fivetran_transformation {Transformation ID}
+```
+
+5. Use the `terraform state show` command to get the values from the state:
+
+```
+terraform state show 'fivetran_transformation.my_imported_fivetran_transformation'
+```
+
+6. Copy the values and paste them to your `.tf` configuration.
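+
+Alternatively, on Terraform 1.5 and later you can declare the import in configuration instead of running the CLI command. A minimal sketch, assuming the same resource address as above (the `id` value is a placeholder for the Transformation ID retrieved via the API):
+
+```hcl
+import {
+  to = fivetran_transformation.my_imported_fivetran_transformation
+  id = "transformation_id"
+}
+```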
+
diff --git a/docs/resources/transformation_project.md b/docs/resources/transformation_project.md
index 28bb4109..591e5e3c 100644
--- a/docs/resources/transformation_project.md
+++ b/docs/resources/transformation_project.md
@@ -93,4 +93,61 @@ terraform import fivetran_transformation_project.my_imported_fivetran_transforma
 terraform state show 'fivetran_transformation_project.my_imported_fivetran_transformation_project'
 ```
 
-5. Copy the values and paste them to your `.tf` configuration.
\ No newline at end of file
+5. Copy the values and paste them to your `.tf` configuration.
+
+
+## How to set up a Transformation Project with a private Git repo
+
+To use a private Git repository for your Transformation Project, you have to grant Fivetran access to that repository.
+To do that, add a deploy key to your repository.
+To get the SSH public key from Fivetran, create a `fivetran_transformation_project` resource:
+
+```hcl
+resource "fivetran_group" "my_group" {
+    name = "My_Group"
+}
+
+resource "fivetran_transformation_project" "project" {
+    provider = fivetran-provider
+    group_id = "group_id"
+    type = "DBT_GIT"
+    run_tests = true
+
+    project_config {
+        git_remote_url = "git_remote_url"
+        git_branch = "git_branch"
+        folder_path = "folder_path"
+        dbt_version = "dbt_version"
+        default_schema = "default_schema"
+        threads = 0
+        target_name = "target_name"
+        environment_vars = ["environment_var"]
+    }
+}
+```
+
+Then set up the Transformation Project public key (the `public_key` field of the created resource) as a deploy key in your repository, using one of the following:
+
+[GitHub Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/repository_deploy_key):
+```hcl
+resource "github_repository_deploy_key" "example_repository_deploy_key" {
+    title = "Repository test key"
+    repository = "repo-owner/repo-name"
+    key = fivetran_transformation_project.project.project_config.public_key
+    read_only = true
+}
+```
+
+or
+
+[Bitbucket Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/DrFaust92/bitbucket/latest/docs/resources/deploy_key):
+```hcl
+resource "bitbucket_deploy_key" "test" {
+    workspace = "repo-owner"
+    repository = "repo-name"
+    key = fivetran_transformation_project.project.project_config.public_key
+    label = "Repository test key"
+}
+```
+
+Because this setup relies on third-party providers, make sure that access to the repositories is granted correctly and that the providers themselves are configured correctly.
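+
+After `terraform apply`, you can also surface the generated key without digging through the state, for example with an output block (a small sketch; the resource address matches the example above):
+
+```hcl
+output "transformation_project_public_key" {
+  value = fivetran_transformation_project.project.project_config.public_key
+}
+```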
\ No newline at end of file diff --git a/fivetran/framework/core/model/transformation.go b/fivetran/framework/core/model/transformation.go index afc83a08..c46061f8 100644 --- a/fivetran/framework/core/model/transformation.go +++ b/fivetran/framework/core/model/transformation.go @@ -178,17 +178,21 @@ func (d *Transformation) ReadFromResponse(ctx context.Context, resp sdk.Transfor configAttrValues["excluded_models"] = types.SetNull(types.StringType) } - - subItems := []attr.Value{} - for _, sub := range resp.Data.TransformationConfig.Steps { - subItem := map[string]attr.Value{} - subItem["name"] = types.StringValue(sub.Name) - subItem["command"] = types.StringValue(sub.Command) - - subObjectValue, _ := types.ObjectValue(stepAttrTypes, subItem) - subItems = append(subItems, subObjectValue) + if resp.Data.TransformationConfig.Steps != nil { + subItems := []attr.Value{} + for _, sub := range resp.Data.TransformationConfig.Steps { + subItem := map[string]attr.Value{} + subItem["name"] = types.StringValue(sub.Name) + subItem["command"] = types.StringValue(sub.Command) + + subObjectValue, _ := types.ObjectValue(stepAttrTypes, subItem) + subItems = append(subItems, subObjectValue) + } + configAttrValues["steps"], _ = types.ListValue(stepSetAttrType, subItems) + } else { + configAttrValues["steps"] = types.ListNull(stepSetAttrType) } - configAttrValues["steps"], _ = types.ListValue(stepSetAttrType, subItems) + d.Config = types.ObjectValueMust(configAttrs, configAttrValues) } diff --git a/fivetran/framework/core/model/transformations.go b/fivetran/framework/core/model/transformations.go index edbc6bca..303db289 100644 --- a/fivetran/framework/core/model/transformations.go +++ b/fivetran/framework/core/model/transformations.go @@ -157,17 +157,21 @@ func (d *Transformations) ReadFromResponse(ctx context.Context, resp transformat } else { configAttrValues["excluded_models"] = types.SetNull(types.StringType) } - - subItems := []attr.Value{} - for _, sub := range v.TransformationConfig.Steps { - subItem := map[string]attr.Value{} - subItem["name"] = types.StringValue(sub.Name) - subItem["command"] = types.StringValue(sub.Command) - - subObjectValue, _ := types.ObjectValue(stepAttrTypes, subItem) - subItems = append(subItems, subObjectValue) + + if v.TransformationConfig.Steps != nil { + subItems := []attr.Value{} + for _, sub := range v.TransformationConfig.Steps { + subItem := map[string]attr.Value{} + subItem["name"] = types.StringValue(sub.Name) + subItem["command"] = types.StringValue(sub.Command) + + subObjectValue, _ := types.ObjectValue(stepAttrTypes, subItem) + subItems = append(subItems, subObjectValue) + } + configAttrValues["steps"], _ = types.ListValue(stepSetAttrType, subItems) + } else { + configAttrValues["steps"] = types.ListNull(stepSetAttrType) } - configAttrValues["steps"], _ = types.ListValue(stepSetAttrType, subItems) item["transformation_config"] = types.ObjectValueMust(configAttrs, configAttrValues) diff --git a/fivetran/framework/core/schema/dbt_models.go b/fivetran/framework/core/schema/dbt_models.go index 7d764a5e..99ba4e8f 100644 --- a/fivetran/framework/core/schema/dbt_models.go +++ b/fivetran/framework/core/schema/dbt_models.go @@ -7,6 +7,7 @@ import ( func DbtModelsDatasource() datasourceSchema.Schema { return datasourceSchema.Schema{ + DeprecationMessage: "This datasource is Deprecated, please follow the 1.5.0 migration guide to update the schema", Attributes: map[string]datasourceSchema.Attribute{ "id": datasourceSchema.StringAttribute{ Computed: true, diff --git 
a/fivetran/framework/core/schema/dbt_project.go b/fivetran/framework/core/schema/dbt_project.go index 938f1ce2..5c73039a 100644 --- a/fivetran/framework/core/schema/dbt_project.go +++ b/fivetran/framework/core/schema/dbt_project.go @@ -19,6 +19,7 @@ func DbtProjectResource(ctx context.Context) resourceSchema.Schema { } return resourceSchema.Schema{ Attributes: attributes, + DeprecationMessage: "This datasource is Deprecated, please follow the 1.5.0 migration guide to update the schema", Blocks: dbtProjectResourceBlocks(ctx), Version: 1, } @@ -34,6 +35,7 @@ func DbtProjectDatasource() datasourceSchema.Schema { } return datasourceSchema.Schema{ Attributes: attributes, + DeprecationMessage: "This datasource is Deprecated, please follow the 1.5.0 migration guide to update the schema", Blocks: dbtProjectDatasourceBlocks(), } } diff --git a/fivetran/framework/core/schema/dbt_projects.go b/fivetran/framework/core/schema/dbt_projects.go index 05806bf0..8d8fc36a 100644 --- a/fivetran/framework/core/schema/dbt_projects.go +++ b/fivetran/framework/core/schema/dbt_projects.go @@ -4,6 +4,7 @@ import datasourceSchema "github.com/hashicorp/terraform-plugin-framework/datasou func DbtProjectsSchema() datasourceSchema.Schema { return datasourceSchema.Schema{ + DeprecationMessage: "This resource is Deprecated, please follow the 1.5.0 migration guide to update the schema", Attributes: map[string]datasourceSchema.Attribute{ "projects": datasourceSchema.ListNestedAttribute{ Computed: true, diff --git a/fivetran/framework/core/schema/dbt_transformation.go b/fivetran/framework/core/schema/dbt_transformation.go index 2c00bd84..6949b6e5 100644 --- a/fivetran/framework/core/schema/dbt_transformation.go +++ b/fivetran/framework/core/schema/dbt_transformation.go @@ -11,6 +11,7 @@ import ( func DbtTransformationResourceSchema(ctx context.Context) resourceSchema.Schema { return resourceSchema.Schema{ + DeprecationMessage: "This resource is Deprecated, please follow the 1.5.0 migration guide to update the schema", Attributes: dbtTransformationSchema().GetResourceSchema(), Blocks: dbtTransformationResourceBlocks(ctx), } @@ -18,6 +19,7 @@ func DbtTransformationResourceSchema(ctx context.Context) resourceSchema.Schema func DbtTransformationDatasourceSchema() datasourceSchema.Schema { return datasourceSchema.Schema{ + DeprecationMessage: "This datasource is Deprecated, please follow the 1.5.0 migration guide to update the schema", Attributes: dbtTransformationSchema().GetDatasourceSchema(), Blocks: dbtTransformationDatasourceBlocks(), } diff --git a/fivetran/framework/datasources/transformation_test.go b/fivetran/framework/datasources/transformation_test.go index 4ff9ebc9..6a6de39f 100644 --- a/fivetran/framework/datasources/transformation_test.go +++ b/fivetran/framework/datasources/transformation_test.go @@ -1,22 +1,22 @@ package datasources_test import ( - "net/http" - "testing" + "net/http" + "testing" - "github.com/fivetran/go-fivetran/tests/mock" - tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/fivetran/go-fivetran/tests/mock" + tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) var ( - transformationDataSourceMockGetHandler *mock.Handler - transformationDataSourceMockData map[string]interface{} + 
transformationDataSourceMockGetHandler *mock.Handler + transformationDataSourceMockData map[string]interface{} ) func setupMockClienttransformationDataSourceMappingTest(t *testing.T) { - transformationResponse := ` + transformationResponse := ` { "id": "transformation_id", "status": "status", @@ -71,31 +71,31 @@ func setupMockClienttransformationDataSourceMappingTest(t *testing.T) { "upgrade_available": true } }` - tfmock.MockClient().Reset() + tfmock.MockClient().Reset() - transformationDataSourceMockGetHandler = tfmock.MockClient().When(http.MethodGet, "/v1/transformations/transformation_id").ThenCall( - func(req *http.Request) (*http.Response, error) { - transformationDataSourceMockData = tfmock.CreateMapFromJsonString(t, transformationResponse) - return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", transformationDataSourceMockData), nil - }, - ) + transformationDataSourceMockGetHandler = tfmock.MockClient().When(http.MethodGet, "/v1/transformations/transformation_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + transformationDataSourceMockData = tfmock.CreateMapFromJsonString(t, transformationResponse) + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", transformationDataSourceMockData), nil + }, + ) } func TestDataSourcetransformationMappingMock(t *testing.T) { - // NOTE: the config is totally inconsistent and contains all possible values for mapping test - step1 := resource.TestStep{ - Config: ` - data "fivetran_transformation" "transformation" { - provider = fivetran-provider - id = "transformation_id" - }`, + // NOTE: the config is totally inconsistent and contains all possible values for mapping test + step1 := resource.TestStep{ + Config: ` + data "fivetran_transformation" "transformation" { + provider = fivetran-provider + id = "transformation_id" + }`, - Check: resource.ComposeAggregateTestCheckFunc( - func(s *terraform.State) error { - tfmock.AssertEqual(t, transformationDataSourceMockGetHandler.Interactions, 1) - tfmock.AssertNotEmpty(t, transformationDataSourceMockData) - return nil - }, + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationDataSourceMockGetHandler.Interactions, 1) + tfmock.AssertNotEmpty(t, transformationDataSourceMockData) + return nil + }, resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "id", "transformation_id"), resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "status", "status"), resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "created_at", "created_at"), @@ -110,12 +110,12 @@ func TestDataSourcetransformationMappingMock(t *testing.T) { resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.steps.0.command", "command1"), resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.steps.1.name", "name2"), resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.steps.1.command", "command2"), - resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.package_name", "package_name"), - resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.connection_ids.0", "connection_id1"), - resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.connection_ids.1", "connection_id2"), - 
resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.excluded_models.0", "excluded_model1"), - resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.excluded_models.1", "excluded_model2"), - resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.upgrade_available", "true"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.package_name", "package_name"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.connection_ids.0", "connection_id1"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.connection_ids.1", "connection_id2"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.excluded_models.0", "excluded_model1"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.excluded_models.1", "excluded_model2"), + resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "transformation_config.upgrade_available", "true"), resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.smart_syncing", "true"), resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.interval", "60"), resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.schedule_type", "schedule_type"), @@ -126,22 +126,22 @@ func TestDataSourcetransformationMappingMock(t *testing.T) { resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.days_of_week.0", "days_of_week1"), resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.days_of_week.1", "days_of_week2"), resource.TestCheckResourceAttr("data.fivetran_transformation.transformation", "schedule.time_of_day", "time_of_day"), - ), - } + ), + } - resource.Test( - t, - resource.TestCase{ - PreCheck: func() { - setupMockClienttransformationDataSourceMappingTest(t) - }, - ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, - CheckDestroy: func(s *terraform.State) error { - return nil - }, - Steps: []resource.TestStep{ - step1, - }, - }, - ) + resource.Test( + t, + resource.TestCase{ + PreCheck: func() { + setupMockClienttransformationDataSourceMappingTest(t) + }, + ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, + CheckDestroy: func(s *terraform.State) error { + return nil + }, + Steps: []resource.TestStep{ + step1, + }, + }, + ) } diff --git a/fivetran/framework/resources/transformation.go b/fivetran/framework/resources/transformation.go index 087820d5..23f5ed94 100644 --- a/fivetran/framework/resources/transformation.go +++ b/fivetran/framework/resources/transformation.go @@ -92,7 +92,7 @@ func (r *transformation) Create(ctx context.Context, req resource.CreateRequest, if !configAttributes["steps"].IsUnknown() && !configAttributes["steps"].IsNull() { evars := []transformations.TransformationStep{} - for _, ev := range configAttributes["steps"].(basetypes.SetValue).Elements() { + for _, ev := range configAttributes["steps"].(basetypes.ListValue).Elements() { if element, ok := ev.(basetypes.ObjectValue); ok { step := transformations.TransformationStep{} step.Name = element.Attributes()["name"].(basetypes.StringValue).ValueString() @@ -265,7 +265,7 @@ func (r *transformation) Update(ctx 
context.Context, req resource.UpdateRequest, if !configAttributes["steps"].IsUnknown() && !configAttributes["steps"].IsNull() { evars := []transformations.TransformationStep{} - for _, ev := range configAttributes["steps"].(basetypes.SetValue).Elements() { + for _, ev := range configAttributes["steps"].(basetypes.ListValue).Elements() { if element, ok := ev.(basetypes.ObjectValue); ok { var step transformations.TransformationStep step.Name = element.Attributes()["name"].(basetypes.StringValue).ValueString() diff --git a/fivetran/framework/resources/transformation_test.go b/fivetran/framework/resources/transformation_test.go index 13e9d9a6..62fcfc0c 100644 --- a/fivetran/framework/resources/transformation_test.go +++ b/fivetran/framework/resources/transformation_test.go @@ -1,335 +1,403 @@ package resources_test import ( - "net/http" - "testing" - "time" - - "github.com/fivetran/go-fivetran/tests/mock" - tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" + "net/http" + "testing" + + "github.com/fivetran/go-fivetran/tests/mock" + tfmock "github.com/fivetran/terraform-provider-fivetran/fivetran/tests/mock" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) var ( - transformationPostHandler *mock.Handler - transformationPatchHandler *mock.Handler - transformationDeleteHandler *mock.Handler - transformationData map[string]interface{} + transformationGitPostHandler *mock.Handler + transformationQuickstartPostHandler *mock.Handler + transformationGitData map[string]interface{} + transformationQuickstartData map[string]interface{} + + transformationGitDeleteHandler *mock.Handler + transformationQuickstartDeleteHandler *mock.Handler + + gitResponse = `{ + "id": "transformation_id", + "status": "status", + "schedule": { + "cron": [ + "cron1","cron2" + ], + "interval": 601, + "smart_syncing": true, + "connection_ids": [ + "connection_id1", + "connection_id2" + ], + "schedule_type": "schedule_type1", + "days_of_week": [ + "days_of_week1", + "days_of_week2" + ], + "time_of_day": "time_of_day1" + }, + "type": "DBT_CORE", + "paused": true, + "created_at": "created_at", + "output_model_names": [ + "output_model_name1", + "output_model_name2" + ], + "created_by_id": "created_by_id", + "transformation_config": { + "project_id": "project_id", + "name": "name", + "steps": [ + { + "name": "name1", + "command": "command1" + }, + { + "name": "name2", + "command": "command2" + } + ] + } + }` + + quickstartResponse = `{ + "id": "transformation_id", + "status": "status", + "schedule": { + "cron": [ + "cron1","cron2" + ], + "interval": 601, + "smart_syncing": true, + "connection_ids": [ + "connection_id1", + "connection_id2" + ], + "schedule_type": "schedule_type1", + "days_of_week": [ + "days_of_week1", + "days_of_week2" + ], + "time_of_day": "time_of_day1" + }, + "type": "QUICKSTART", + "paused": true, + "created_at": "created_at", + "output_model_names": [ + "output_model_name1", + "output_model_name2" + ], + "created_by_id": "created_by_id", + "transformation_config": { + "package_name": "package_name", + "connection_ids": [ + "connection_id1", + "connection_id2" + ], + "excluded_models": [ + "excluded_model1","excluded_model2" + ], + "upgrade_available": true + } + }` ) -func onPostTranformation(t *testing.T, req *http.Request) (*http.Response, error) { - tfmock.AssertEmpty(t, 
transformationData) - - body := tfmock.RequestBodyToJson(t, req) -v - // Check the request - tfmock.AssertEqual(t, len(body), 4) - - tfmock.AssertKeyExistsAndHasValue(t, body, "dbt_model_id", "dbt_model_id") - tfmock.AssertKeyExistsAndHasValue(t, body, "paused", false) - tfmock.AssertKeyExistsAndHasValue(t, body, "run_tests", false) - - requestSchedule := tfmock.AssertKeyExists(t, body, "schedule").(map[string]interface{}) - - tfmock.AssertKeyExistsAndHasValue(t, requestSchedule, "schedule_type", "TIME_OF_DAY") - tfmock.AssertKeyExistsAndHasValue(t, requestSchedule, "time_of_day", "12:00") - - requestScheduleDays := tfmock.AssertKeyExists(t, requestSchedule, "days_of_week").([]interface{}) - - expectedDays := make([]interface{}, 0) - - expectedDays = append(expectedDays, "MONDAY") - //expectedDays = append(expectedDays, "SATURDAY") - - tfmock.AssertArrayItems(t, requestScheduleDays, expectedDays) - - // Add response fields - body["id"] = "transformation_id" - body["dbt_project_id"] = "dbt_project_id" - body["output_model_name"] = "output_model_name" - - connectorIds := make([]string, 0) - body["connector_ids"] = append(connectorIds, "connector_id") - - modelIds := make([]string, 0) - body["model_ids"] = append(modelIds, "model_id") - - body["created_at"] = time.Now().Format("2006-01-02T15:04:05.000000Z") - - transformationData = body - - response := tfmock.FivetranSuccessResponse(t, req, http.StatusCreated, "", transformationData) - - return response, nil +func setupMockClientTransformationGitResource(t *testing.T) { + tfmock.MockClient().Reset() + + transformationGitPostHandler = tfmock.MockClient().When(http.MethodPost, "/v1/transformations").ThenCall( + func(req *http.Request) (*http.Response, error) { + body := tfmock.RequestBodyToJson(t, req) + tfmock.AssertKeyExistsAndHasValue(t, body, "type", "DBT_CORE") + tfmock.AssertKeyExistsAndHasValue(t, body, "paused", true) + + tfmock.AssertKeyExists(t, body, "transformation_config") + config := body["transformation_config"].(map[string]interface{}) + tfmock.AssertKeyExistsAndHasValue(t, config, "project_id", "project_id") + tfmock.AssertKeyExistsAndHasValue(t, config, "name", "name") + + steps := config["steps"].([]interface{}) + tfmock.AssertKeyExistsAndHasValue(t, steps[0].(map[string]interface{}), "name", "name1") + tfmock.AssertKeyExistsAndHasValue(t, steps[0].(map[string]interface{}), "command", "command1") + tfmock.AssertKeyExistsAndHasValue(t, steps[1].(map[string]interface{}), "name", "name2") + tfmock.AssertKeyExistsAndHasValue(t, steps[1].(map[string]interface{}), "command", "command2") + + tfmock.AssertKeyExists(t, body, "schedule") + schedule := body["schedule"].(map[string]interface{}) + tfmock.AssertKeyExistsAndHasValue(t, schedule, "interval", float64(601)) + tfmock.AssertKeyExistsAndHasValue(t, schedule, "smart_syncing", true) + tfmock.AssertKeyExistsAndHasValue(t, schedule, "schedule_type", "schedule_type1") + tfmock.AssertKeyExistsAndHasValue(t, schedule, "time_of_day", "time_of_day1") + + cron := schedule["cron"].([]interface{}) + tfmock.AssertEqual(t, len(cron), 2) + tfmock.AssertEqual(t, cron[0], "cron1") + tfmock.AssertEqual(t, cron[1], "cron2") + + connectionIds := schedule["connection_ids"].([]interface{}) + tfmock.AssertEqual(t, len(connectionIds), 2) + tfmock.AssertEqual(t, connectionIds[0], "connection_id1") + tfmock.AssertEqual(t, connectionIds[1], "connection_id2") + + daysOfWeek := schedule["days_of_week"].([]interface{}) + tfmock.AssertEqual(t, len(daysOfWeek), 2) + tfmock.AssertEqual(t, daysOfWeek[0], 
"days_of_week1") + tfmock.AssertEqual(t, daysOfWeek[1], "days_of_week2") + + transformationGitData = tfmock.CreateMapFromJsonString(t, gitResponse) + return tfmock.FivetranSuccessResponse(t, req, http.StatusCreated, "Success", transformationGitData), nil + }, + ) + + tfmock.MockClient().When(http.MethodGet, "/v1/transformations/transformation_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + tfmock.AssertNotEmpty(t, transformationGitData) + response := tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "", transformationGitData) + return response, nil + }, + ) + + transformationGitDeleteHandler = tfmock.MockClient().When(http.MethodDelete, "/v1/transformations/transformation_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + tfmock.AssertNotEmpty(t, transformationGitData) + transformationGitData = nil + response := tfmock.FivetranSuccessResponse(t, req, 200, "", nil) + return response, nil + }, + ) } -func onPatchTransformation(t *testing.T, req *http.Request, updateIteration int) (*http.Response, error) { - tfmock.AssertNotEmpty(t, transformationData) - - body := tfmock.RequestBodyToJson(t, req) - - if updateIteration == 0 { - // Check the request - tfmock.AssertEqual(t, len(body), 3) - tfmock.AssertKeyExistsAndHasValue(t, body, "paused", true) - tfmock.AssertKeyExistsAndHasValue(t, body, "run_tests", true) - requestSchedule := tfmock.AssertKeyExists(t, body, "schedule").(map[string]interface{}) - - requestScheduleDays := tfmock.AssertKeyExists(t, requestSchedule, "days_of_week").([]interface{}) - expectedDays := make([]interface{}, 0) - expectedDays = append(expectedDays, "MONDAY") - expectedDays = append(expectedDays, "SATURDAY") - - tfmock.AssertArrayItems(t, requestScheduleDays, expectedDays) - - // Update saved values - for k, v := range body { - if k != "schedule" { - transformationData[k] = v - } else { - stateSchedule := transformationData[k].(map[string]interface{}) - stateSchedule["days_of_week"] = expectedDays - } - } - - response := tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Transformation has been updated", transformationData) - return response, nil - } - - if updateIteration == 1 { - // Check the request - tfmock.AssertEqual(t, len(body), 1) - schedule := tfmock.AssertKeyExists(t, body, "schedule").(map[string]interface{}) - tfmock.AssertKeyExistsAndHasValue(t, schedule, "schedule_type", "INTERVAL") - tfmock.AssertKeyExistsAndHasValue(t, schedule, "interval", float64(60)) - - // Update saved values - for k, v := range body { - transformationData[k] = v - } - - response := tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Transformation has been updated", transformationData) - return response, nil - } - - response := tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "", transformationData) - - return response, nil +func setupMockClientTransformationQuickstartResource(t *testing.T) { + tfmock.MockClient().Reset() + + transformationQuickstartPostHandler = tfmock.MockClient().When(http.MethodPost, "/v1/transformations").ThenCall( + func(req *http.Request) (*http.Response, error) { + body := tfmock.RequestBodyToJson(t, req) + tfmock.AssertKeyExistsAndHasValue(t, body, "type", "QUICKSTART") + tfmock.AssertKeyExistsAndHasValue(t, body, "paused", true) + + tfmock.AssertKeyExists(t, body, "transformation_config") + config := body["transformation_config"].(map[string]interface{}) + tfmock.AssertKeyExistsAndHasValue(t, config, "package_name", "package_name") + connectionIds := config["connection_ids"].([]interface{}) + 
tfmock.AssertEqual(t, len(connectionIds), 2) + tfmock.AssertEqual(t, connectionIds[0], "connection_id1") + tfmock.AssertEqual(t, connectionIds[1], "connection_id2") + excludedModels := config["excluded_models"].([]interface{}) + tfmock.AssertEqual(t, len(excludedModels), 2) + tfmock.AssertEqual(t, excludedModels[0], "excluded_model1") + tfmock.AssertEqual(t, excludedModels[1], "excluded_model2") + + tfmock.AssertKeyExists(t, body, "schedule") + schedule := body["schedule"].(map[string]interface{}) + tfmock.AssertKeyExistsAndHasValue(t, schedule, "interval", float64(601)) + tfmock.AssertKeyExistsAndHasValue(t, schedule, "smart_syncing", true) + tfmock.AssertKeyExistsAndHasValue(t, schedule, "schedule_type", "schedule_type1") + tfmock.AssertKeyExistsAndHasValue(t, schedule, "time_of_day", "time_of_day1") + + cron := schedule["cron"].([]interface{}) + tfmock.AssertEqual(t, len(cron), 2) + tfmock.AssertEqual(t, cron[0], "cron1") + tfmock.AssertEqual(t, cron[1], "cron2") + + connectionIds = schedule["connection_ids"].([]interface{}) + tfmock.AssertEqual(t, len(connectionIds), 2) + tfmock.AssertEqual(t, connectionIds[0], "connection_id1") + tfmock.AssertEqual(t, connectionIds[1], "connection_id2") + + daysOfWeek := schedule["days_of_week"].([]interface{}) + tfmock.AssertEqual(t, len(daysOfWeek), 2) + tfmock.AssertEqual(t, daysOfWeek[0], "days_of_week1") + tfmock.AssertEqual(t, daysOfWeek[1], "days_of_week2") + + transformationQuickstartData = tfmock.CreateMapFromJsonString(t, quickstartResponse) + return tfmock.FivetranSuccessResponse(t, req, http.StatusCreated, "Success", transformationQuickstartData), nil + }, + ) + + tfmock.MockClient().When(http.MethodGet, "/v1/transformations/transformation_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + tfmock.AssertNotEmpty(t, transformationQuickstartData) + response := tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "", transformationQuickstartData) + return response, nil + }, + ) + + transformationQuickstartDeleteHandler = tfmock.MockClient().When(http.MethodDelete, "/v1/transformations/transformation_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + tfmock.AssertNotEmpty(t, transformationQuickstartData) + transformationQuickstartData = nil + response := tfmock.FivetranSuccessResponse(t, req, 200, "", nil) + return response, nil + }, + ) } -func setupMockClientTransformationResource(t *testing.T) { - tfmock.MockClient().Reset() - transformationData = nil - updateCounter := 0 - - transformationPostHandler = tfmock.MockClient().When(http.MethodPost, "/v1/dbt/transformations").ThenCall( - func(req *http.Request) (*http.Response, error) { - return onPostTranformation(t, req) - }, - ) - - tfmock.MockClient().When(http.MethodGet, "/v1/dbt/transformations/transformation_id").ThenCall( - func(req *http.Request) (*http.Response, error) { - tfmock.AssertNotEmpty(t, transformationData) - response := tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "", transformationData) - return response, nil - }, - ) - - transformationPatchHandler = tfmock.MockClient().When(http.MethodPatch, "/v1/dbt/transformations/transformation_id").ThenCall( - func(req *http.Request) (*http.Response, error) { - response, err := onPatchTransformation(t, req, updateCounter) - updateCounter++ - return response, err - }, - ) - - transformationDeleteHandler = tfmock.MockClient().When(http.MethodDelete, "/v1/dbt/transformations/transformation_id").ThenCall( - func(req *http.Request) (*http.Response, error) { - tfmock.AssertNotEmpty(t, 
transformationData) - transformationData = nil - response := tfmock.FivetranSuccessResponse(t, req, 200, "", nil) - return response, nil - }, - ) - - projectResponse := `{ - "id": "project_id", - "group_id": "group_id", - "dbt_version": "dbt_version", - "created_at": "created_at", - "created_by_id": "created_by_id", - "public_key": "public_key", - "default_schema": "default_schema", - "target_name": "target_name", - "environment_vars": ["environment_var"], - "threads": 1, - "type": "GIT", - "project_config": { - "git_remote_url": "git_remote_url", - "git_branch": "git_branch", - "folder_path": "folder_path" - }, - "status": "READY" - } - ` - - tfmock.MockClient().When(http.MethodGet, "/v1/dbt/projects/dbt_project_id").ThenCall( - func(req *http.Request) (*http.Response, error) { - return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", tfmock.CreateMapFromJsonString(t, projectResponse)), nil - }, - ) - - modelsMappingResponse := ` - { - "items":[ - { - "id": "dbt_model_id", - "model_name": "dbt_model_name", - "scheduled": true - } - ], - "next_cursor": null +func TestResourceTransformationGitMock(t *testing.T) { + step1 := resource.TestStep{ + Config: ` + resource "fivetran_transformation" "transformation" { + provider = fivetran-provider + + type = "DBT_CORE" + paused = true + + schedule { + cron = ["cron1","cron2"] + interval = 601 + smart_syncing = true + connection_ids = ["connection_id1", "connection_id2"] + schedule_type = "schedule_type1" + days_of_week = ["days_of_week1","days_of_week2"] + time_of_day = "time_of_day1" + } + + transformation_config { + project_id = "project_id" + name = "name" + steps = [ + { + name = "name1" + command = "command1" + }, + { + name = "name2" + command = "command2" + } + ] + } + } + `, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationGitPostHandler.Interactions, 1) + return nil + }, + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "id", "transformation_id"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "status", "status"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "created_at", "created_at"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "created_by_id", "created_by_id"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "type", "DBT_CORE"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "paused", "true"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "output_model_names.0", "output_model_name1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "output_model_names.1", "output_model_name2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.project_id", "project_id"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.name", "name"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.steps.0.name", "name1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.steps.0.command", "command1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.steps.1.name", "name2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.steps.1.command", "command2"), + 
resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.smart_syncing", "true"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.interval", "601"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.schedule_type", "schedule_type1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.cron.0", "cron1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.cron.1", "cron2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.connection_ids.0", "connection_id1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.connection_ids.1", "connection_id2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.days_of_week.0", "days_of_week1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.days_of_week.1", "days_of_week2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.time_of_day", "time_of_day1"), + ), } - ` - - modelMappingResponse := ` - { - "id": "dbt_model_id", - "model_name": "dbt_model_name", - "scheduled": true - } - ` - - tfmock.MockClient().When(http.MethodGet, "/v1/dbt/models/dbt_model_id").ThenCall( - func(req *http.Request) (*http.Response, error) { - return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", tfmock.CreateMapFromJsonString(t, modelMappingResponse)), nil - }, - ) - - tfmock.MockClient().When(http.MethodGet, "/v1/dbt/models").ThenCall( - func(req *http.Request) (*http.Response, error) { - project_id := req.URL.Query().Get("project_id") - tfmock.AssertEqual(t, project_id, "dbt_project_id") - return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", tfmock.CreateMapFromJsonString(t, modelsMappingResponse)), nil - }, - ) + resource.Test( + t, + resource.TestCase{ + PreCheck: func() { + setupMockClientTransformationGitResource(t) + }, + ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, + CheckDestroy: func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationGitDeleteHandler.Interactions, 1) + tfmock.AssertEmpty(t, transformationData) + return nil + }, + + Steps: []resource.TestStep{ + step1, + }, + }, + ) } -func TestResourceTransformationMock(t *testing.T) { - step1 := resource.TestStep{ - Config: ` - resource "fivetran_dbt_transformation" "transformation" { - provider = fivetran-provider - - dbt_project_id = "dbt_project_id" - dbt_model_name = "dbt_model_name" - run_tests = "false" - paused = "false" - schedule { - schedule_type = "TIME_OF_DAY" - time_of_day = "12:00" - days_of_week = ["MONDAY"] - } - } - `, - - Check: resource.ComposeAggregateTestCheckFunc( - func(s *terraform.State) error { - tfmock.AssertEqual(t, transformationPostHandler.Interactions, 1) - tfmock.AssertNotEmpty(t, transformationData) - return nil - }, - resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "id", "transformation_id"), - resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "dbt_model_id", "dbt_model_id"), - resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "run_tests", "false"), - resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "paused", "false"), - resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "schedule.schedule_type", "TIME_OF_DAY"), - 
resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "schedule.time_of_day", "12:00"), - resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "schedule.days_of_week.0", "MONDAY"), - ), - } - - // Update run_tests and paused fields, update days of week in schedule - step2 := resource.TestStep{ - Config: ` - resource "fivetran_dbt_transformation" "transformation" { - provider = fivetran-provider - - dbt_project_id = "dbt_project_id" - dbt_model_name = "dbt_model_name" - run_tests = "true" - paused = "true" - schedule { - schedule_type = "TIME_OF_DAY" - time_of_day = "12:00" - days_of_week = ["MONDAY", "SATURDAY"] - } - } - `, - - Check: resource.ComposeAggregateTestCheckFunc( - func(s *terraform.State) error { - tfmock.AssertEqual(t, transformationPatchHandler.Interactions, 1) - tfmock.AssertNotEmpty(t, transformationData) - return nil - }, - - resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "run_tests", "true"), - resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "paused", "true"), - resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "schedule.days_of_week.1", "SATURDAY"), - ), - } - - // Update schedule_type and paused fields - step3 := resource.TestStep{ - Config: ` - resource "fivetran_dbt_transformation" "transformation" { - provider = fivetran-provider - - dbt_project_id = "dbt_project_id" - dbt_model_name = "dbt_model_name" - run_tests = "true" - paused = "true" - schedule { - schedule_type = "INTERVAL" - interval = 60 - } - } - `, - - Check: resource.ComposeAggregateTestCheckFunc( - func(s *terraform.State) error { - tfmock.AssertEqual(t, transformationPatchHandler.Interactions, 2) - tfmock.AssertNotEmpty(t, transformationData) - return nil - }, - - resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "schedule.schedule_type", "INTERVAL"), - resource.TestCheckResourceAttr("fivetran_dbt_transformation.transformation", "schedule.interval", "60"), - ), - } - - resource.Test( - t, - resource.TestCase{ - PreCheck: func() { - setupMockClientTransformationResource(t) - }, - ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, - CheckDestroy: func(s *terraform.State) error { - tfmock.AssertEqual(t, transformationDeleteHandler.Interactions, 1) - tfmock.AssertEmpty(t, transformationData) - return nil - }, - - Steps: []resource.TestStep{ - step1, - step2, - step3, - }, - }, - ) -} +func TestResourceTransformationQuickstartMock(t *testing.T) { + step1 := resource.TestStep{ + Config: ` + resource "fivetran_transformation" "transformation" { + provider = fivetran-provider + + type = "QUICKSTART" + paused = true + + schedule { + cron = ["cron1","cron2"] + interval = 601 + smart_syncing = true + connection_ids = ["connection_id1", "connection_id2"] + schedule_type = "schedule_type1" + days_of_week = ["days_of_week1","days_of_week2"] + time_of_day = "time_of_day1" + } + + transformation_config { + package_name = "package_name" + connection_ids = ["connection_id1", "connection_id2"] + excluded_models = ["excluded_model1", "excluded_model2"] + } + } + `, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationQuickstartPostHandler.Interactions, 1) + return nil + }, + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "id", "transformation_id"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "status", "status"), + 
resource.TestCheckResourceAttr("fivetran_transformation.transformation", "created_at", "created_at"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "created_by_id", "created_by_id"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "type", "QUICKSTART"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "paused", "true"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "output_model_names.0", "output_model_name1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "output_model_names.1", "output_model_name2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.package_name", "package_name"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.connection_ids.0", "connection_id1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.connection_ids.1", "connection_id2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.excluded_models.0", "excluded_model1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.excluded_models.1", "excluded_model2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.upgrade_available", "true"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.smart_syncing", "true"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.interval", "601"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.schedule_type", "schedule_type1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.cron.0", "cron1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.cron.1", "cron2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.connection_ids.0", "connection_id1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.connection_ids.1", "connection_id2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.days_of_week.0", "days_of_week1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.days_of_week.1", "days_of_week2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.time_of_day", "time_of_day1"), + ), + } + + resource.Test( + t, + resource.TestCase{ + PreCheck: func() { + setupMockClientTransformationQuickstartResource(t) + }, + ProtoV6ProviderFactories: tfmock.ProtoV6ProviderFactories, + CheckDestroy: func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationQuickstartDeleteHandler.Interactions, 1) + return nil + }, + + Steps: []resource.TestStep{ + step1, + }, + }, + ) +} \ No newline at end of file From fd05aaf81d40238f5f2576658ffd322b41b35be1 Mon Sep 17 00:00:00 2001 From: Aleksandr Boldyrev Date: Thu, 23 Jan 2025 23:10:01 +0100 Subject: [PATCH 07/13] docs --- docs/guides/version_1.5.0_update_guides.md | 180 ++++++++++++++++++ docs/resources/dbt_git_project_config.md | 2 +- docs/resources/dbt_project.md | 2 +- docs/resources/dbt_transformation.md | 2 +- docs/resources/transformation.md | 69 +++++-- .../core/schema/dbt_git_project_config.go | 1 + .../version_1.5.0_update_guides.md.tmpl | 180 ++++++++++++++++++ 
.../resources/dbt_git_project_config.md.tmpl |   2 +-
 templates/resources/dbt_project.md.tmpl      |   2 +-
 .../resources/dbt_transformation.md.tmpl     |   2 +-
 templates/resources/transformation.md.tmpl   |  69 +++++--
 11 files changed, 477 insertions(+), 34 deletions(-)
 create mode 100644 docs/guides/version_1.5.0_update_guides.md
 create mode 100644 templates/guides/version_1.5.0_update_guides.md.tmpl

diff --git a/docs/guides/version_1.5.0_update_guides.md b/docs/guides/version_1.5.0_update_guides.md
new file mode 100644
index 00000000..97bc7c8b
--- /dev/null
+++ b/docs/guides/version_1.5.0_update_guides.md
@@ -0,0 +1,180 @@
+---
+page_title: "Version Update 1.5.0"
+subcategory: "Upgrade Guides"
+---
+
+# Version 1.5.0
+
+## What's new in 1.5.0
+
+In version `1.5.0` of the Fivetran Terraform provider, we have implemented new resources for managing Transformations.
+
+## Migration guide
+
+### Provider
+
+Update your provider configuration in the following way:
+
+Previous configuration:
+
+```hcl
+required_providers {
+   fivetran = {
+     version = "~> 1.4.2"
+     source = "fivetran/fivetran"
+   }
+ }
+```
+
+Updated configuration:
+
+```hcl
+required_providers {
+   fivetran = {
+     version = ">= 1.5.0"
+     source = "fivetran/fivetran"
+   }
+ }
+```
+
+### Resource `fivetran_dbt_project`
+
+Replace all your `fivetran_dbt_project` resources with `fivetran_transformation_project`.
+
+Previous configuration:
+
+```hcl
+resource "fivetran_dbt_project" "test_project" {
+  provider = fivetran-provider
+  group_id = fivetran_destination.test_destination.id
+  dbt_version = "1.0.1"
+  threads = 1
+  default_schema = "dbt_demo_test_e2e_terraform"
+  type = "GIT"
+  project_config {
+    folder_path = "/folder/path"
+    git_remote_url = "git@github.com:fivetran/repo-name.git"
+    git_branch = "main"
+  }
+}
+```
+
+Updated configuration:
+
+```hcl
+resource "fivetran_transformation_project" "project" {
+  provider = fivetran-provider
+  group_id = "group_id"
+  type = "DBT_GIT"
+  run_tests = true
+
+  project_config {
+    git_remote_url = "git@github.com:fivetran/repo-name.git"
+    git_branch = "main"
+    folder_path = "/folder/path"
+    dbt_version = "1.0.1"
+    default_schema = "dbt_demo_test_e2e_terraform"
+    threads = 1
+    target_name = "target_name"
+    environment_vars = ["environment_var"]
+  }
+}
+```
+
+Then you need to set up the transformation project public key (the `public_key` field of the created resource) as a deploy key in your repository, using either:
+
+[GitHub Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/repository_deploy_key):
+```hcl
+resource "github_repository_deploy_key" "example_repository_deploy_key" {
+  title      = "Repository test key"
+  repository = "repo-owner/repo-name"
+  key        = fivetran_transformation_project.test_project.project_config.public_key
+  read_only  = true
+}
+```
+
+or
+
+[Bitbucket Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/DrFaust92/bitbucket/latest/docs/resources/deploy_key):
+```hcl
+resource "bitbucket_deploy_key" "test" {
+  workspace  = "repo-owner"
+  repository = "repo-name"
+  key        = fivetran_transformation_project.test_project.project_config.public_key
+  label      = "Repository test key"
+}
+```
+
+Since this setup relies on third-party providers, please make sure that repository access is granted correctly and that those providers are configured with valid connection credentials.
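+
+If you would rather keep the existing project than recreate it, you can also move it between the two resource types in the Terraform state by hand. The commands below are only a sketch: the resource addresses and the project ID are placeholders, and this assumes the `fivetran_transformation_project` resource supports `terraform import`:
+
+```
+# Remove the deprecated resource from state (this does not delete the project in Fivetran)
+terraform state rm fivetran_dbt_project.test_project
+
+# Import the existing project into the new resource type by its Fivetran project ID
+terraform import fivetran_transformation_project.project <project-id>
+```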
+
+### Resource `fivetran_dbt_transformation`
+
+Replace all your `fivetran_dbt_transformation` resources with `fivetran_transformation`.
+
+Previous configuration:
+
+```hcl
+resource "fivetran_dbt_transformation" "transformation" {
+    dbt_model_name = "dbt_model_name"
+    dbt_project_id = "dbt_project_id"
+    run_tests = "false"
+    paused = "false"
+    schedule {
+        schedule_type = "TIME_OF_DAY"
+        time_of_day = "12:00"
+        days_of_week = ["MONDAY", "SATURDAY"]
+    }
+}
+```
+
+Updated configuration:
+
+```hcl
+resource "fivetran_transformation" "transformation" {
+    provider = fivetran-provider
+
+    type = "DBT_CORE"
+    paused = false
+
+    schedule {
+        cron = ["cron1","cron2"]
+        interval = 60
+        smart_syncing = true
+        connection_ids = ["connection_id1", "connection_id2"]
+        schedule_type = "TIME_OF_DAY"
+        days_of_week = ["MONDAY", "SATURDAY"]
+        time_of_day = "14:00"
+    }
+
+    transformation_config {
+        project_id = "dbt_project_id"
+        name = "name"
+        steps = [
+            {
+                name = "name1"
+                command = "command1"
+            },
+            {
+                name = "name2"
+                command = "command2"
+            }
+        ]
+    }
+}
+```
+
+### Datasources `fivetran_dbt_project`, `fivetran_dbt_projects`, `fivetran_dbt_transformation`, `fivetran_dbt_models`
+
+Replace the following datasources:
+- `fivetran_dbt_project` with `fivetran_transformation_project`
+- `fivetran_dbt_projects` with `fivetran_transformation_projects`
+- `fivetran_dbt_transformation` with `fivetran_transformation`
+
+Remove the `fivetran_dbt_models` datasource.
+
+### Update terraform state
+
+Once all configurations have been updated, run:
+
+```
+terraform init -upgrade
+```
\ No newline at end of file
diff --git a/docs/resources/dbt_git_project_config.md b/docs/resources/dbt_git_project_config.md
index 81f29780..d0e8e203 100644
--- a/docs/resources/dbt_git_project_config.md
+++ b/docs/resources/dbt_git_project_config.md
@@ -4,7 +4,7 @@ page_title: "Resource: fivetran_dbt_git_project_config"
 
 # Resource: fivetran_dbt_git_project_config
 
-Resource is in ALPHA state.
+This resource is deprecated. Please follow the 1.5.0 migration guide to update the schema.
 
 This resource allows you to add and manage dbt Git Projects Configs.
 
diff --git a/docs/resources/dbt_project.md b/docs/resources/dbt_project.md
index f6eccf5b..074677d0 100644
--- a/docs/resources/dbt_project.md
+++ b/docs/resources/dbt_project.md
@@ -4,7 +4,7 @@ page_title: "Resource: fivetran_dbt_project"
 
 # Resource: fivetran_dbt_project
 
-Resource is in ALPHA state.
+This resource is deprecated. Please follow the 1.5.0 migration guide to update the schema.
 
 This resource allows you to add, manage and delete dbt Projects in your account.
 
diff --git a/docs/resources/dbt_transformation.md b/docs/resources/dbt_transformation.md
index 98845ee1..e3a1dab9 100644
--- a/docs/resources/dbt_transformation.md
+++ b/docs/resources/dbt_transformation.md
@@ -4,7 +4,7 @@ page_title: "Resource: fivetran_dbt_transformation"
 
 # Resource: fivetran_dbt_transformation
 
-Resource is in ALPHA state.
+This resource is deprecated. Please follow the 1.5.0 migration guide to update the schema.
 
 This resource allows you to add, manage and delete dbt Transformations for existing dbt Model. To retrieve available dbt Models use this [Retrieve dbt Project models](https://fivetran.com/docs/rest-api/dbt-transformation-management#retrievedbtprojectmodels) endpoint.
 
diff --git a/docs/resources/transformation.md b/docs/resources/transformation.md
index ac16ddd6..1eb63d09 100644
--- a/docs/resources/transformation.md
+++ b/docs/resources/transformation.md
@@ -8,24 +8,65 @@ Resource is in ALPHA state.
 
 This resource allows you to add, manage and delete transformation projects in your account.
 
-## Example Usage
+## Example Usage for dbt Core Transformation
 
 ```hcl
 resource "fivetran_transformation" "transformation" {
     provider = fivetran-provider
-    group_id = "group_id"
-    type = "DBT_GIT"
-    run_tests = true
-
-    project_config {
-        git_remote_url = "git_remote_url"
-        git_branch = "git_branch"
-        folder_path = "folder_path"
-        dbt_version = "dbt_version"
-        default_schema = "default_schema"
-        threads = 0
-        target_name = "target_name"
-        environment_vars = ["environment_var"]
+
+    type = "DBT_CORE"
+    paused = true
+
+    schedule {
+        cron = ["cron1","cron2"]
+        interval = 601
+        smart_syncing = true
+        connection_ids = ["connection_id1", "connection_id2"]
+        schedule_type = "schedule_type1"
+        days_of_week = ["days_of_week1","days_of_week2"]
+        time_of_day = "time_of_day1"
+    }
+
+    transformation_config {
+        project_id = "project_id"
+        name = "name"
+        steps = [
+            {
+                name = "name1"
+                command = "command1"
+            },
+            {
+                name = "name2"
+                command = "command2"
+            }
+        ]
+    }
+}
+```
+
+## Example Usage for Quickstart Transformation
+
+```hcl
+resource "fivetran_transformation" "transformation" {
+    provider = fivetran-provider
+
+    type = "QUICKSTART"
+    paused = true
+
+    schedule {
+        cron = ["cron1","cron2"]
+        interval = 601
+        smart_syncing = true
+        connection_ids = ["connection_id1", "connection_id2"]
+        schedule_type = "schedule_type1"
+        days_of_week = ["days_of_week1","days_of_week2"]
+        time_of_day = "time_of_day1"
+    }
+
+    transformation_config {
+        package_name = "package_name"
+        connection_ids = ["connection_id1", "connection_id2"]
+        excluded_models = ["excluded_model1", "excluded_model2"]
     }
 }
 ```
diff --git a/fivetran/framework/core/schema/dbt_git_project_config.go b/fivetran/framework/core/schema/dbt_git_project_config.go
index 875b4292..52082f50 100644
--- a/fivetran/framework/core/schema/dbt_git_project_config.go
+++ b/fivetran/framework/core/schema/dbt_git_project_config.go
@@ -7,6 +7,7 @@ import (
 
 func DbtGitProjectConfigSchema() resourceSchema.Schema {
 	return resourceSchema.Schema{
+		DeprecationMessage: "This resource is deprecated. Please follow the 1.5.0 migration guide to update the schema",
 		Attributes: dbtGitProjectConfigSchema().GetResourceSchema(),
 	}
 }
diff --git a/templates/guides/version_1.5.0_update_guides.md.tmpl b/templates/guides/version_1.5.0_update_guides.md.tmpl
new file mode 100644
index 00000000..97bc7c8b
--- /dev/null
+++ b/templates/guides/version_1.5.0_update_guides.md.tmpl
@@ -0,0 +1,180 @@
+---
+page_title: "Version Update 1.5.0"
+subcategory: "Upgrade Guides"
+---
+
+# Version 1.5.0
+
+## What's new in 1.5.0
+
+In version `1.5.0` of the Fivetran Terraform provider, we have implemented new resources for managing Transformations.
+
+## Migration guide
+
+### Provider
+
+Update your provider configuration in the following way:
+
+Previous configuration:
+
+```hcl
+required_providers {
+   fivetran = {
+     version = "~> 1.4.2"
+     source = "fivetran/fivetran"
+   }
+ }
+```
+
+Updated configuration:
+
+```hcl
+required_providers {
+   fivetran = {
+     version = ">= 1.5.0"
+     source = "fivetran/fivetran"
+   }
+ }
+```
+
+### Resource `fivetran_dbt_project`
+
+Replace all your `fivetran_dbt_project` resources with `fivetran_transformation_project`.
+
+Previous configuration:
+
+```hcl
+resource "fivetran_dbt_project" "test_project" {
+  provider = fivetran-provider
+  group_id = fivetran_destination.test_destination.id
+  dbt_version = "1.0.1"
+  threads = 1
+  default_schema = "dbt_demo_test_e2e_terraform"
+  type = "GIT"
+  project_config {
+    folder_path = "/folder/path"
+    git_remote_url = "git@github.com:fivetran/repo-name.git"
+    git_branch = "main"
+  }
+}
+```
+
+Updated configuration:
+
+```hcl
+resource "fivetran_transformation_project" "project" {
+  provider = fivetran-provider
+  group_id = "group_id"
+  type = "DBT_GIT"
+  run_tests = true
+
+  project_config {
+    git_remote_url = "git@github.com:fivetran/repo-name.git"
+    git_branch = "main"
+    folder_path = "/folder/path"
+    dbt_version = "1.0.1"
+    default_schema = "dbt_demo_test_e2e_terraform"
+    threads = 1
+    target_name = "target_name"
+    environment_vars = ["environment_var"]
+  }
+}
+```
+
+Then you need to set up the transformation project public key (the `public_key` field of the created resource) as a deploy key in your repository, using either:
+
+[GitHub Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/integrations/github/latest/docs/resources/repository_deploy_key):
+```hcl
+resource "github_repository_deploy_key" "example_repository_deploy_key" {
+  title      = "Repository test key"
+  repository = "repo-owner/repo-name"
+  key        = fivetran_transformation_project.test_project.project_config.public_key
+  read_only  = true
+}
+```
+
+or
+
+[Bitbucket Provider Repository Deploy Key Resource](https://registry.terraform.io/providers/DrFaust92/bitbucket/latest/docs/resources/deploy_key):
+```hcl
+resource "bitbucket_deploy_key" "test" {
+  workspace  = "repo-owner"
+  repository = "repo-name"
+  key        = fivetran_transformation_project.test_project.project_config.public_key
+  label      = "Repository test key"
+}
+```
+
+Since this setup relies on third-party providers, please make sure that repository access is granted correctly and that those providers are configured with valid connection credentials.
+
+### Resource `fivetran_dbt_transformation`
+
+Replace all your `fivetran_dbt_transformation` resources with `fivetran_transformation`.
+
+Previous configuration:
+
+```hcl
+resource "fivetran_dbt_transformation" "transformation" {
+    dbt_model_name = "dbt_model_name"
+    dbt_project_id = "dbt_project_id"
+    run_tests = "false"
+    paused = "false"
+    schedule {
+        schedule_type = "TIME_OF_DAY"
+        time_of_day = "12:00"
+        days_of_week = ["MONDAY", "SATURDAY"]
+    }
+}
+```
+
+Updated configuration:
+
+```hcl
+resource "fivetran_transformation" "transformation" {
+    provider = fivetran-provider
+
+    type = "DBT_CORE"
+    paused = false
+
+    schedule {
+        cron = ["cron1","cron2"]
+        interval = 60
+        smart_syncing = true
+        connection_ids = ["connection_id1", "connection_id2"]
+        schedule_type = "TIME_OF_DAY"
+        days_of_week = ["MONDAY", "SATURDAY"]
+        time_of_day = "14:00"
+    }
+
+    transformation_config {
+        project_id = "dbt_project_id"
+        name = "name"
+        steps = [
+            {
+                name = "name1"
+                command = "command1"
+            },
+            {
+                name = "name2"
+                command = "command2"
+            }
+        ]
+    }
+}
+```
+
+### Datasources `fivetran_dbt_project`, `fivetran_dbt_projects`, `fivetran_dbt_transformation`, `fivetran_dbt_models`
+
+Replace the following datasources:
+- `fivetran_dbt_project` with `fivetran_transformation_project`
+- `fivetran_dbt_projects` with `fivetran_transformation_projects`
+- `fivetran_dbt_transformation` with `fivetran_transformation`
+
+Remove the `fivetran_dbt_models` datasource.
+
+### Update terraform state
+
+Once all configurations have been updated, run:
+
+```
+terraform init -upgrade
+```
\ No newline at end of file
diff --git a/templates/resources/dbt_git_project_config.md.tmpl b/templates/resources/dbt_git_project_config.md.tmpl
index 5a717ffd..b3c5a8b8 100644
--- a/templates/resources/dbt_git_project_config.md.tmpl
+++ b/templates/resources/dbt_git_project_config.md.tmpl
@@ -4,7 +4,7 @@ page_title: "Resource: fivetran_dbt_git_project_config"
 
 # Resource: fivetran_dbt_git_project_config
 
-Resource is in ALPHA state.
+This resource is deprecated. Please follow the 1.5.0 migration guide to update the schema.
 
 This resource allows you to add and manage dbt Git Projects Configs.
 
diff --git a/templates/resources/dbt_project.md.tmpl b/templates/resources/dbt_project.md.tmpl
index b1249ff3..5a1dda23 100644
--- a/templates/resources/dbt_project.md.tmpl
+++ b/templates/resources/dbt_project.md.tmpl
@@ -4,7 +4,7 @@ page_title: "Resource: fivetran_dbt_project"
 
 # Resource: fivetran_dbt_project
 
-Resource is in ALPHA state.
+This resource is deprecated. Please follow the 1.5.0 migration guide to update the schema.
 
 This resource allows you to add, manage and delete dbt Projects in your account.
 
diff --git a/templates/resources/dbt_transformation.md.tmpl b/templates/resources/dbt_transformation.md.tmpl
index b8a3ef04..ff02d565 100644
--- a/templates/resources/dbt_transformation.md.tmpl
+++ b/templates/resources/dbt_transformation.md.tmpl
@@ -4,7 +4,7 @@ page_title: "Resource: fivetran_dbt_transformation"
 
 # Resource: fivetran_dbt_transformation
 
-Resource is in ALPHA state.
+This resource is deprecated. Please follow the 1.5.0 migration guide to update the schema.
 
 This resource allows you to add, manage and delete dbt Transformations for existing dbt Model. To retrieve available dbt Models use this [Retrieve dbt Project models](https://fivetran.com/docs/rest-api/dbt-transformation-management#retrievedbtprojectmodels) endpoint.
 
diff --git a/templates/resources/transformation.md.tmpl b/templates/resources/transformation.md.tmpl
index 275f60dc..aac6674c 100644
--- a/templates/resources/transformation.md.tmpl
+++ b/templates/resources/transformation.md.tmpl
@@ -8,24 +8,65 @@ Resource is in ALPHA state.
 
 This resource allows you to add, manage and delete transformation projects in your account.
-## Example Usage +## Example Usage for dbt Core Transformation ```hcl resource "fivetran_transformation" "transformation" { provider = fivetran-provider - group_id = "group_id" - type = "DBT_GIT" - run_tests = true - - project_config { - git_remote_url = "git_remote_url" - git_branch = "git_branch" - folder_path = "folder_path" - dbt_version = "dbt_version" - default_schema = "default_schema" - threads = 0 - target_name = "target_name" - environment_vars = ["environment_var"] + + type = "DBT_CORE" + paused = true + + schedule { + cron = ["cron1","cron2"] + interval = 601 + smart_syncing = true + connection_ids = ["connection_id1", "connection_id2"] + schedule_type = "schedule_type1" + days_of_week = ["days_of_week1","days_of_week2"] + time_of_day = "time_of_day1" + } + + transformation_config { + project_id = "project_id" + name = "name" + steps = [ + { + name = "name1" + command = "command1" + }, + { + name = "name2" + command = "command2" + } + ] + } +} +``` + +## Example Usage for Quickstart Transformation + +```hcl +resource "fivetran_transformation" "transformation" { + provider = fivetran-provider + + type = "QUICKSTART" + paused = true + + schedule { + cron = ["cron1","cron2"] + interval = 601 + smart_syncing = true + connection_ids = ["connection_id1", "connection_id2"] + schedule_type = "schedule_type1" + days_of_week = ["days_of_week1","days_of_week2"] + time_of_day = "time_of_day1" + } + + transformation_config { + package_name = "package_name" + connection_ids = ["connection_id1", "connection_id2"] + excluded_models = ["excluded_model1", "excluded_model2"] } } ``` From b12c3b9627aac18343238858107f05163de4828e Mon Sep 17 00:00:00 2001 From: Aleksandr Boldyrev Date: Thu, 23 Jan 2025 23:15:00 +0100 Subject: [PATCH 08/13] changelog --- CHANGELOG.md | 12 ++++++++++++ fivetran/framework/provider.go | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f9fe3190..e59edead 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,10 +9,22 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [1.5.0](https://github.com/fivetran/terraform-provider-fivetran/compare/v1.4.2...v1.5.0) +## Added +Support for a new [Transformation Management API](https://fivetran.com/docs/rest-api/api-reference/transformation-management) +- New resource `fivetran_transformation_project` instead of deprecated `fivetran_dbt_project` +- New resource `fivetran_transformation` instead of deprecated `fivetran_dbt_transformation` +- New data source `fivetran_transformation_project` instead of deprecated `fivetran_dbt_project` +- New data source `fivetran_transformation_projects` instead of deprecated `fivetran_dbt_projects` +- New data source `fivetran_transformation` instead of deprecated `fivetran_dbt_transformation` + - New data source `fivetran_connectors` that allows to retrieve the list of existing Connections available for the current account. - New data source `fivetran_destinations` that allows to retrieve the list of existing Destinations available for the current account. - New data source `fivetran_external_logs` that allows to retrieve the list of existing External Logging Services available for the current account. 
+## Deprecated +- Datasources `fivetran_dbt_project`, `fivetran_dbt_projects`, `fivetran_dbt_transformation`, `fivetran_dbt_models` +- Resources `fivetran_dbt_project`, `fivetran_dbt_transformation` + ## [1.4.2](https://github.com/fivetran/terraform-provider-fivetran/compare/v1.4.1...v1.4.2) ## Added diff --git a/fivetran/framework/provider.go b/fivetran/framework/provider.go index 808741ce..2fc83d91 100644 --- a/fivetran/framework/provider.go +++ b/fivetran/framework/provider.go @@ -17,7 +17,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" ) -const Version = "1.4.3" // Current provider version +const Version = "1.5.0" // Current provider version type fivetranProvider struct { mockClient httputils.HttpClient From 6b263348ed0d961086ea9e5c50710b90abfcd900 Mon Sep 17 00:00:00 2001 From: Aleksandr Boldyrev Date: Mon, 27 Jan 2025 12:41:54 +0100 Subject: [PATCH 09/13] address comments --- .../framework/core/schema/transformation.go | 6 + .../framework/resources/transformation.go | 159 +++++++++++++----- .../data-sources/quickstart_packages.md.tmpl | 3 +- 3 files changed, 126 insertions(+), 42 deletions(-) diff --git a/fivetran/framework/core/schema/transformation.go b/fivetran/framework/core/schema/transformation.go index 05b8a7c4..7aa67895 100644 --- a/fivetran/framework/core/schema/transformation.go +++ b/fivetran/framework/core/schema/transformation.go @@ -5,6 +5,9 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types/basetypes" datasourceSchema "github.com/hashicorp/terraform-plugin-framework/datasource/schema" resourceSchema "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/setplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" ) func TransformationResource() resourceSchema.Schema { @@ -158,6 +161,7 @@ func transformationConfigDatasourceSchema() map[string]datasourceSchema.Attribut func transformationConfigResourceSchema() map[string]resourceSchema.Attribute { return map[string]resourceSchema.Attribute{ "project_id": resourceSchema.StringAttribute{ + PlanModifiers: []planmodifier.String{stringplanmodifier.RequiresReplace()}, Optional: true, Description: "The unique identifier for the dbt Core project within the Fivetran system", }, @@ -166,10 +170,12 @@ func transformationConfigResourceSchema() map[string]resourceSchema.Attribute { Description: "The transformation name", }, "package_name": resourceSchema.StringAttribute{ + PlanModifiers: []planmodifier.String{stringplanmodifier.RequiresReplace()}, Optional: true, Description: `The Quickstart transformation package name`, }, "connection_ids": resourceSchema.SetAttribute{ + PlanModifiers: []planmodifier.Set{setplanmodifier.RequiresReplace()}, Optional: true, ElementType: basetypes.StringType{}, Description: "The list of the connection identifiers to be used for the integrated schedule. 
Also used to identify package_name automatically if package_name was not specified", diff --git a/fivetran/framework/resources/transformation.go b/fivetran/framework/resources/transformation.go index 23f5ed94..44b5465b 100644 --- a/fivetran/framework/resources/transformation.go +++ b/fivetran/framework/resources/transformation.go @@ -56,33 +56,114 @@ func (r *transformation) Create(ctx context.Context, req resource.CreateRequest, return } + transformationType := data.ProjectType.ValueString() client := r.GetClient() svc := client.NewTransformationCreate() - svc.ProjectType(data.ProjectType.ValueString()) + svc.ProjectType(transformationType) svc.Paused(data.Paused.ValueBool()) if !data.Config.IsNull() && !data.Config.IsUnknown() { config := fivetran.NewTransformationConfig() configAttributes := data.Config.Attributes() + /* DBT_CORE */ if !configAttributes["project_id"].(basetypes.StringValue).IsNull() && !configAttributes["project_id"].(basetypes.StringValue).IsUnknown() { - config.ProjectId(configAttributes["project_id"].(basetypes.StringValue).ValueString()) + if transformationType != "DBT_CORE" { + resp.Diagnostics.AddError( + "Unable to Create Transformation Resource.", + fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "project_id"), + ) + return + } + + config.ProjectId(configAttributes["project_id"].(basetypes.StringValue).ValueString()) } + if !configAttributes["name"].(basetypes.StringValue).IsNull() && !configAttributes["name"].(basetypes.StringValue).IsUnknown() { + if transformationType != "DBT_CORE" { + resp.Diagnostics.AddError( + "Unable to Create Transformation Resource.", + fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "name"), + ) + return + } + config.Name(configAttributes["name"].(basetypes.StringValue).ValueString()) + } + + if !configAttributes["steps"].IsUnknown() && !configAttributes["steps"].IsNull() { + if transformationType != "DBT_CORE" { + resp.Diagnostics.AddError( + "Unable to Create Transformation Resource.", + fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "steps"), + ) + return + } + + evars := []transformations.TransformationStep{} + + for _, ev := range configAttributes["steps"].(basetypes.ListValue).Elements() { + if element, ok := ev.(basetypes.ObjectValue); ok { + step := transformations.TransformationStep{} + step.Name = element.Attributes()["name"].(basetypes.StringValue).ValueString() + step.Command = element.Attributes()["command"].(basetypes.StringValue).ValueString() + evars = append(evars, step) + } + } + + config.Steps(evars) } + + /* QUICKSTART */ + packageName := "" if !configAttributes["package_name"].(basetypes.StringValue).IsNull() && !configAttributes["package_name"].(basetypes.StringValue).IsUnknown() { - config.PackageName(configAttributes["package_name"].(basetypes.StringValue).ValueString()) + if transformationType != "QUICKSTART" { + resp.Diagnostics.AddError( + "Unable to Create Transformation Resource.", + fmt.Sprintf("The parameter `%v` can be set only for QUICKSTART type transformation", "package_name"), + ) + return + } + + packageName = configAttributes["package_name"].(basetypes.StringValue).ValueString() + + config.PackageName(packageName) } + connectionIds := []string{} if !configAttributes["connection_ids"].IsUnknown() && !configAttributes["connection_ids"].IsNull() { - evars := []string{} + if transformationType != "QUICKSTART" { + resp.Diagnostics.AddError( + "Unable to Create Transformation Resource.", + 
fmt.Sprintf("The parameter `%v` can be set only for QUICKSTART type transformation", "connection_ids"), + ) + return + } + for _, ev := range configAttributes["connection_ids"].(basetypes.SetValue).Elements() { - evars = append(evars, ev.(basetypes.StringValue).ValueString()) + connectionIds = append(connectionIds, ev.(basetypes.StringValue).ValueString()) } - config.ConnectionIds(evars) + + + config.ConnectionIds(connectionIds) + } + + if len(connectionIds) == 0 && packageName == "" && transformationType == "QUICKSTART" { + resp.Diagnostics.AddError( + "Unable to Create Transformation Resource.", + fmt.Sprintf("For a QUICKSTART type transformation, at least one of the `%v` or `%v` parameters must be set.", "package_name", "connection_ids"), + ) + return } if !configAttributes["excluded_models"].IsUnknown() && !configAttributes["excluded_models"].IsNull() { + if transformationType != "QUICKSTART" { + resp.Diagnostics.AddError( + "Unable to Create Transformation Resource.", + fmt.Sprintf("The parameter `%v` can be set only for QUICKSTART type transformation", "excluded_models"), + ) + return + } + evars := []string{} for _, ev := range configAttributes["excluded_models"].(basetypes.SetValue).Elements() { evars = append(evars, ev.(basetypes.StringValue).ValueString()) @@ -90,19 +171,6 @@ func (r *transformation) Create(ctx context.Context, req resource.CreateRequest, config.ExcludedModels(evars) } - if !configAttributes["steps"].IsUnknown() && !configAttributes["steps"].IsNull() { - evars := []transformations.TransformationStep{} - for _, ev := range configAttributes["steps"].(basetypes.ListValue).Elements() { - if element, ok := ev.(basetypes.ObjectValue); ok { - step := transformations.TransformationStep{} - step.Name = element.Attributes()["name"].(basetypes.StringValue).ValueString() - step.Command = element.Attributes()["command"].(basetypes.StringValue).ValueString() - evars = append(evars, step) - } - } - config.Steps(evars) - } - svc.TransformationConfig(config) } @@ -237,33 +305,28 @@ func (r *transformation) Update(ctx context.Context, req resource.UpdateRequest, if !plan.Config.IsNull() && !plan.Config.IsUnknown() { config := fivetran.NewTransformationConfig() configAttributes := plan.Config.Attributes() - if !configAttributes["project_id"].(basetypes.StringValue).IsNull() && !configAttributes["project_id"].(basetypes.StringValue).IsUnknown() { - config.ProjectId(configAttributes["project_id"].(basetypes.StringValue).ValueString()) - } - if !configAttributes["name"].(basetypes.StringValue).IsNull() && !configAttributes["name"].(basetypes.StringValue).IsUnknown() { - config.Name(configAttributes["name"].(basetypes.StringValue).ValueString()) - } - if !configAttributes["package_name"].(basetypes.StringValue).IsNull() && !configAttributes["package_name"].(basetypes.StringValue).IsUnknown() { - config.PackageName(configAttributes["package_name"].(basetypes.StringValue).ValueString()) - } - if !configAttributes["connection_ids"].IsUnknown() && !configAttributes["connection_ids"].IsNull() { - evars := []string{} - for _, ev := range configAttributes["connection_ids"].(basetypes.SetValue).Elements() { - evars = append(evars, ev.(basetypes.StringValue).ValueString()) + if !configAttributes["name"].(basetypes.StringValue).IsNull() && !configAttributes["name"].(basetypes.StringValue).IsUnknown() { + if state.ProjectType.ValueString() != "DBT_CORE" { + resp.Diagnostics.AddError( + "Unable to Create Transformation Resource.", + fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type 
transformation", "name"), + ) + return } - config.ConnectionIds(evars) - } - if !configAttributes["excluded_models"].IsUnknown() && !configAttributes["excluded_models"].IsNull() { - evars := []string{} - for _, ev := range configAttributes["excluded_models"].(basetypes.SetValue).Elements() { - evars = append(evars, ev.(basetypes.StringValue).ValueString()) - } - config.ExcludedModels(evars) + config.Name(configAttributes["name"].(basetypes.StringValue).ValueString()) } if !configAttributes["steps"].IsUnknown() && !configAttributes["steps"].IsNull() { + if state.ProjectType.ValueString() != "DBT_CORE" { + resp.Diagnostics.AddError( + "Unable to Create Transformation Resource.", + fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "steps"), + ) + return + } + evars := []transformations.TransformationStep{} for _, ev := range configAttributes["steps"].(basetypes.ListValue).Elements() { if element, ok := ev.(basetypes.ObjectValue); ok { @@ -276,6 +339,22 @@ func (r *transformation) Update(ctx context.Context, req resource.UpdateRequest, config.Steps(evars) } + if !configAttributes["excluded_models"].IsUnknown() && !configAttributes["excluded_models"].IsNull() { + if state.ProjectType.ValueString() != "QUICKSTART" { + resp.Diagnostics.AddError( + "Unable to Create Transformation Resource.", + fmt.Sprintf("The parameter `%v` can be set only for QUICKSTART type transformation", "excluded_models"), + ) + return + } + + evars := []string{} + for _, ev := range configAttributes["excluded_models"].(basetypes.SetValue).Elements() { + evars = append(evars, ev.(basetypes.StringValue).ValueString()) + } + config.ExcludedModels(evars) + } + svc.TransformationConfig(config) } diff --git a/templates/data-sources/quickstart_packages.md.tmpl b/templates/data-sources/quickstart_packages.md.tmpl index 3a7ff2e2..c9557d2a 100644 --- a/templates/data-sources/quickstart_packages.md.tmpl +++ b/templates/data-sources/quickstart_packages.md.tmpl @@ -9,8 +9,7 @@ Returns a list of available Quickstart transformation package metadata details ## Example Usage ```hcl -data "fivetran_quickstart_packages" "test" { - id = "id" +data "fivetran_quickstart_packages" "all_packages_metadata" { } ``` From 2b06f28df01578fffacd25cd452e2322b1f15f40 Mon Sep 17 00:00:00 2001 From: Aleksandr Boldyrev <125960514+fivetran-aleksandrboldyrev@users.noreply.github.com> Date: Mon, 27 Jan 2025 15:18:27 +0100 Subject: [PATCH 10/13] Apply suggestions from code review Co-authored-by: Vitaly Mosin <73104048+beevital@users.noreply.github.com> --- templates/resources/transformation.md.tmpl | 10 +++------- templates/resources/transformation_project.md.tmpl | 4 ++-- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/templates/resources/transformation.md.tmpl b/templates/resources/transformation.md.tmpl index aac6674c..d3680971 100644 --- a/templates/resources/transformation.md.tmpl +++ b/templates/resources/transformation.md.tmpl @@ -54,13 +54,9 @@ resource "fivetran_transformation" "transformation" { paused = true schedule { - cron = ["cron1","cron2"] - interval = 601 - smart_syncing = true - connection_ids = ["connection_id1", "connection_id2"] - schedule_type = "schedule_type1" - days_of_week = ["days_of_week1","days_of_week2"] - time_of_day = "time_of_day1" + schedule_type = "TIME_OF_DAY" + days_of_week = ["MONDAY", "FRIDAY"] + time_of_day = "11:00" } transformation_config { diff --git a/templates/resources/transformation_project.md.tmpl b/templates/resources/transformation_project.md.tmpl index 
50d69ca7..996d32f1 100644 --- a/templates/resources/transformation_project.md.tmpl +++ b/templates/resources/transformation_project.md.tmpl @@ -23,9 +23,9 @@ resource "fivetran_transformation_project" "project" { folder_path = "folder_path" dbt_version = "dbt_version" default_schema = "default_schema" - threads = 0 + threads = 1 target_name = "target_name" - environment_vars = ["environment_var"] + environment_vars = ["DBT_VARIABLE=variable_value"] } } ``` From 6ac26aec165e779feff895ffa409db742cfaee51 Mon Sep 17 00:00:00 2001 From: Aleksandr Boldyrev Date: Mon, 27 Jan 2025 17:07:20 +0100 Subject: [PATCH 11/13] fix comments --- docs/data-sources/quickstart_packages.md | 3 +- docs/data-sources/transformation.md | 16 +- docs/data-sources/transformations.md | 16 +- docs/resources/transformation.md | 64 +++-- docs/resources/transformation_project.md | 4 +- .../core/model/transformation_project.go | 50 +++- .../framework/core/schema/transformation.go | 20 +- .../core/schema/transformation_project.go | 2 +- .../framework/resources/transformation.go | 157 +++++++---- .../resources/transformation_project.go | 58 +++- .../resources/transformation_project_test.go | 102 ++++++- .../resources/transformation_test.go | 258 +++++++++++++++++- templates/resources/transformation.md.tmpl | 40 ++- 13 files changed, 661 insertions(+), 129 deletions(-) diff --git a/docs/data-sources/quickstart_packages.md b/docs/data-sources/quickstart_packages.md index 2bdd1d78..d00d1004 100644 --- a/docs/data-sources/quickstart_packages.md +++ b/docs/data-sources/quickstart_packages.md @@ -9,8 +9,7 @@ Returns a list of available Quickstart transformation package metadata details ## Example Usage ```hcl -data "fivetran_quickstart_packages" "test" { - id = "id" +data "fivetran_quickstart_packages" "all_packages_metadata" { } ``` diff --git a/docs/data-sources/transformation.md b/docs/data-sources/transformation.md index 6c255496..a291c733 100644 --- a/docs/data-sources/transformation.md +++ b/docs/data-sources/transformation.md @@ -19,7 +19,7 @@ data "fivetran_transformation" "test" { ### Required -- `id` (String) The unique identifier for the dbt Transformation within the Fivetran system. +- `id` (String) The unique identifier for the Transformation within the Fivetran system. ### Read-Only @@ -30,20 +30,20 @@ data "fivetran_transformation" "test" { - `schedule` (Block, Read-only) (see [below for nested schema](#nestedblock--schedule)) - `status` (String) Status of transformation Project (NOT_READY, READY, ERROR). - `transformation_config` (Block, Read-only) (see [below for nested schema](#nestedblock--transformation_config)) -- `type` (String) Transformation type. +- `type` (String) Transformation type. The following values are supported: DBT_CORE, QUICKSTART. ### Nested Schema for `schedule` Read-Only: -- `connection_ids` (Set of String) Identifiers of related connectors. -- `cron` (Set of String) Cron schedule: list of CRON strings. -- `days_of_week` (Set of String) The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. -- `interval` (Number) The time interval in minutes between subsequent transformation runs. -- `schedule_type` (String) The type of the schedule to run the dbt Transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL. 
For INTEGRATED schedule type, interval and time_of_day values are ignored and only the days_of_week parameter values are taken into account (but may be empty or null). For TIME_OF_DAY schedule type, the interval parameter value is ignored and the time_of_day values is taken into account along with days_of_week value. For INTERVAL schedule type, time_of_day value is ignored and the interval parameter value is taken into account along with days_of_week value. +- `connection_ids` (Set of String) The list of the connection identifiers to be used for the integrated schedule. Not expected for QUICKSTART transformations +- `cron` (Set of String) Cron schedule: list of CRON strings. Used for for CRON schedule type +- `days_of_week` (Set of String) The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. Used for for INTEGRATED schedule type +- `interval` (Number) The time interval in minutes between subsequent transformation runs. Used for for INTERVAL schedule type +- `schedule_type` (String) The type of the schedule to run the Transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL, CRON. - `smart_syncing` (Boolean) The boolean flag that enables the Smart Syncing schedule -- `time_of_day` (String) The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00" +- `time_of_day` (String) The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00". Used for for TIME_OF_DAY schedule type diff --git a/docs/data-sources/transformations.md b/docs/data-sources/transformations.md index f0863657..a6fc3892 100644 --- a/docs/data-sources/transformations.md +++ b/docs/data-sources/transformations.md @@ -25,7 +25,7 @@ data "fivetran_transformations" "test" { Required: -- `id` (String) The unique identifier for the dbt Transformation within the Fivetran system. +- `id` (String) The unique identifier for the Transformation within the Fivetran system. Read-Only: @@ -36,20 +36,20 @@ Read-Only: - `schedule` (Block, Read-only) (see [below for nested schema](#nestedblock--transformations--schedule)) - `status` (String) Status of transformation Project (NOT_READY, READY, ERROR). - `transformation_config` (Block, Read-only) (see [below for nested schema](#nestedblock--transformations--transformation_config)) -- `type` (String) Transformation type. +- `type` (String) Transformation type. The following values are supported: DBT_CORE, QUICKSTART. ### Nested Schema for `transformations.schedule` Read-Only: -- `connection_ids` (Set of String) Identifiers of related connectors. -- `cron` (Set of String) Cron schedule: list of CRON strings. -- `days_of_week` (Set of String) The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. -- `interval` (Number) The time interval in minutes between subsequent transformation runs. 
-- `schedule_type` (String) The type of the schedule to run the dbt Transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL. For INTEGRATED schedule type, interval and time_of_day values are ignored and only the days_of_week parameter values are taken into account (but may be empty or null). For TIME_OF_DAY schedule type, the interval parameter value is ignored and the time_of_day values is taken into account along with days_of_week value. For INTERVAL schedule type, time_of_day value is ignored and the interval parameter value is taken into account along with days_of_week value. +- `connection_ids` (Set of String) The list of the connection identifiers to be used for the integrated schedule. Not expected for QUICKSTART transformations +- `cron` (Set of String) Cron schedule: list of CRON strings. Used for for CRON schedule type +- `days_of_week` (Set of String) The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. Used for for INTEGRATED schedule type +- `interval` (Number) The time interval in minutes between subsequent transformation runs. Used for for INTERVAL schedule type +- `schedule_type` (String) The type of the schedule to run the Transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL, CRON. - `smart_syncing` (Boolean) The boolean flag that enables the Smart Syncing schedule -- `time_of_day` (String) The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00" +- `time_of_day` (String) The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00". 
Used for for TIME_OF_DAY schedule type diff --git a/docs/resources/transformation.md b/docs/resources/transformation.md index 1eb63d09..812b96c5 100644 --- a/docs/resources/transformation.md +++ b/docs/resources/transformation.md @@ -18,13 +18,13 @@ resource "fivetran_transformation" "transformation" { paused = true schedule { - cron = ["cron1","cron2"] + cron = ["0 */1 * * *"] interval = 601 smart_syncing = true connection_ids = ["connection_id1", "connection_id2"] - schedule_type = "schedule_type1" - days_of_week = ["days_of_week1","days_of_week2"] - time_of_day = "time_of_day1" + schedule_type = "INTEGRATED" + days_of_week = ["MONDAY", "FRIDAY"] + time_of_day = "14:00" } transformation_config { @@ -54,13 +54,8 @@ resource "fivetran_transformation" "transformation" { paused = true schedule { - cron = ["cron1","cron2"] - interval = 601 - smart_syncing = true - connection_ids = ["connection_id1", "connection_id2"] - schedule_type = "schedule_type1" - days_of_week = ["days_of_week1","days_of_week2"] - time_of_day = "time_of_day1" + schedule_type = "TIME_OF_DAY" + time_of_day = "11:00" } transformation_config { @@ -71,6 +66,37 @@ resource "fivetran_transformation" "transformation" { } ``` +## Example Usages for Transformation Schedule section + +```hcl +schedule { + schedule_type = "TIME_OF_DAY" + days_of_week = ["MONDAY", "FRIDAY"] + time_of_day = "11:00" +} +``` + +```hcl +schedule { + schedule_type = "INTEGRATED" + connection_ids = ["connection_id1", "connection_id2"] +} +``` + +```hcl +schedule { + schedule_type = "INTERVAL" + interval = 601 +} +``` + +```hcl +schedule { + schedule_type = "CRON" + cron = ["0 */1 * * *"] +} +``` + ## Schema @@ -79,13 +105,13 @@ resource "fivetran_transformation" "transformation" { - `paused` (Boolean) The field indicating whether the transformation will be set into the paused state. By default, the value is false. - `schedule` (Block, Optional) (see [below for nested schema](#nestedblock--schedule)) - `transformation_config` (Block, Optional) (see [below for nested schema](#nestedblock--transformation_config)) -- `type` (String) Transformation type. +- `type` (String) Transformation type. The following values are supported: DBT_CORE, QUICKSTART. ### Read-Only - `created_at` (String) The timestamp of when the transformation was created in your account. - `created_by_id` (String) The unique identifier for the User within the Fivetran system who created the transformation. -- `id` (String) The unique identifier for the dbt Transformation within the Fivetran system. +- `id` (String) The unique identifier for the Transformation within the Fivetran system. - `output_model_names` (Set of String) Identifiers of related models. - `status` (String) Status of transformation Project (NOT_READY, READY, ERROR). @@ -94,13 +120,13 @@ resource "fivetran_transformation" "transformation" { Optional: -- `connection_ids` (Set of String) Identifiers of related connectors. -- `cron` (Set of String) Cron schedule: list of CRON strings. -- `days_of_week` (Set of String) The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. -- `interval` (Number) The time interval in minutes between subsequent transformation runs. -- `schedule_type` (String) The type of the schedule to run the dbt Transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL. 
For INTEGRATED schedule type, interval and time_of_day values are ignored and only the days_of_week parameter values are taken into account (but may be empty or null). For TIME_OF_DAY schedule type, the interval parameter value is ignored and the time_of_day values is taken into account along with days_of_week value. For INTERVAL schedule type, time_of_day value is ignored and the interval parameter value is taken into account along with days_of_week value. +- `connection_ids` (Set of String) The list of the connection identifiers to be used for the integrated schedule. Not expected for QUICKSTART transformations +- `cron` (Set of String) Cron schedule: list of CRON strings. Used for for CRON schedule type +- `days_of_week` (Set of String) The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. Used for for INTEGRATED schedule type +- `interval` (Number) The time interval in minutes between subsequent transformation runs. Used for for INTERVAL schedule type +- `schedule_type` (String) The type of the schedule to run the Transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL, CRON. - `smart_syncing` (Boolean) The boolean flag that enables the Smart Syncing schedule -- `time_of_day` (String) The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00" +- `time_of_day` (String) The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00". 
Used for TIME_OF_DAY schedule type. diff --git a/docs/resources/transformation_project.md b/docs/resources/transformation_project.md index 591e5e3c..51981d4b 100644 --- a/docs/resources/transformation_project.md +++ b/docs/resources/transformation_project.md @@ -23,9 +23,9 @@ resource "fivetran_transformation_project" "project" { folder_path = "folder_path" dbt_version = "dbt_version" default_schema = "default_schema" - threads = 0 + threads = 1 target_name = "target_name" - environment_vars = ["environment_var"] + environment_vars = ["DBT_VARIABLE=variable_value"] } } ``` diff --git a/fivetran/framework/core/model/transformation_project.go b/fivetran/framework/core/model/transformation_project.go index 46dc6c93..e2ef282b 100644 --- a/fivetran/framework/core/model/transformation_project.go +++ b/fivetran/framework/core/model/transformation_project.go @@ -63,15 +63,49 @@ func (d *TransformationResourceProject) ReadFromResponse(ctx context.Context, re "threads": types.Int64Type, } projectConfigItems := map[string]attr.Value{} - projectConfigItems["dbt_version"] = types.StringValue(resp.Data.ProjectConfig.DbtVersion) - projectConfigItems["default_schema"] = types.StringValue(resp.Data.ProjectConfig.DefaultSchema) - projectConfigItems["git_remote_url"] = types.StringValue(resp.Data.ProjectConfig.GitRemoteUrl) - projectConfigItems["folder_path"] = types.StringValue(resp.Data.ProjectConfig.FolderPath) - projectConfigItems["git_branch"] = types.StringValue(resp.Data.ProjectConfig.GitBranch) - projectConfigItems["target_name"] = types.StringValue(resp.Data.ProjectConfig.TargetName) - projectConfigItems["public_key"] = types.StringValue(resp.Data.ProjectConfig.PublicKey) - projectConfigItems["threads"] = types.Int64Value(int64(resp.Data.ProjectConfig.Threads)) + if resp.Data.ProjectConfig.DbtVersion != "" { + projectConfigItems["dbt_version"] = types.StringValue(resp.Data.ProjectConfig.DbtVersion) + } else { + projectConfigItems["dbt_version"] = types.StringNull() + } + + if resp.Data.ProjectConfig.DefaultSchema != "" { + projectConfigItems["default_schema"] = types.StringValue(resp.Data.ProjectConfig.DefaultSchema) + } else { + projectConfigItems["default_schema"] = types.StringNull() + } + + if resp.Data.ProjectConfig.GitRemoteUrl != "" { + projectConfigItems["git_remote_url"] = types.StringValue(resp.Data.ProjectConfig.GitRemoteUrl) + } else { + projectConfigItems["git_remote_url"] = types.StringNull() + } + + if resp.Data.ProjectConfig.FolderPath != "" { + projectConfigItems["folder_path"] = types.StringValue(resp.Data.ProjectConfig.FolderPath) + } else { + projectConfigItems["folder_path"] = types.StringNull() + } + + if resp.Data.ProjectConfig.GitBranch != "" { + projectConfigItems["git_branch"] = types.StringValue(resp.Data.ProjectConfig.GitBranch) + } else { + projectConfigItems["git_branch"] = types.StringNull() + } + if resp.Data.ProjectConfig.TargetName != "" { + projectConfigItems["target_name"] = types.StringValue(resp.Data.ProjectConfig.TargetName) + } else { + projectConfigItems["target_name"] = types.StringNull() + } + + if resp.Data.ProjectConfig.PublicKey != "" { + projectConfigItems["public_key"] = types.StringValue(resp.Data.ProjectConfig.PublicKey) + } else { + projectConfigItems["public_key"] = types.StringNull() + } + + projectConfigItems["threads"] = types.Int64Value(int64(resp.Data.ProjectConfig.Threads)) envVars := []attr.Value{} for _, el := range resp.Data.ProjectConfig.EnvironmentVars { envVars = append(envVars, types.StringValue(el))
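The repeated empty-string checks above all implement the same rule: an empty API value becomes a Terraform null rather than an empty string. A small helper could collapse that boilerplate; this is a hypothetical refactoring sketch, not part of the patch:

```go
package model

import "github.com/hashicorp/terraform-plugin-framework/types"

// stringOrNull maps an empty API response value to a Terraform null instead
// of "", so optional attributes the user never set do not show up as a
// permanent plan diff. Hypothetical helper; the name is illustrative.
func stringOrNull(v string) types.String {
	if v == "" {
		return types.StringNull()
	}
	return types.StringValue(v)
}
```

Each if/else pair would then reduce to a single assignment, e.g. `projectConfigItems["dbt_version"] = stringOrNull(resp.Data.ProjectConfig.DbtVersion)`.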
diff --git a/fivetran/framework/core/schema/transformation.go b/fivetran/framework/core/schema/transformation.go index 7aa67895..2a1668de 100644 --- a/fivetran/framework/core/schema/transformation.go +++ b/fivetran/framework/core/schema/transformation.go @@ -43,15 +43,15 @@ func transformationSchema() core.Schema { "id": { ValueType: core.String, IsId: true, - Description: "The unique identifier for the dbt Transformation within the Fivetran system.", + Description: "The unique identifier for the Transformation within the Fivetran system.", }, "paused": { ValueType: core.Boolean, Description: "The field indicating whether the transformation will be set into the paused state. By default, the value is false.", }, "type": { - ValueType: core.String, - Description: "Transformation type.", + ValueType: core.StringEnum, + Description: "Transformation type. The following values are supported: DBT_CORE, QUICKSTART.", }, "created_at": { ValueType: core.String, @@ -81,24 +81,24 @@ func transformationScheduleSchema() core.Schema { return core.Schema{ Fields: map[string]core.SchemaField{ "schedule_type": { - ValueType: core.String, - Description: "The type of the schedule to run the dbt Transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL. For INTEGRATED schedule type, interval and time_of_day values are ignored and only the days_of_week parameter values are taken into account (but may be empty or null). For TIME_OF_DAY schedule type, the interval parameter value is ignored and the time_of_day values is taken into account along with days_of_week value. For INTERVAL schedule type, time_of_day value is ignored and the interval parameter value is taken into account along with days_of_week value.", + ValueType: core.StringEnum, + Description: "The type of the schedule to run the Transformation on. The following values are supported: INTEGRATED, TIME_OF_DAY, INTERVAL, CRON.", }, "days_of_week": { ValueType: core.StringsSet, - Description: "The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY.", + Description: "The set of the days of the week the transformation should be launched on. The following values are supported: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. Used for INTEGRATED schedule type.", }, "interval": { ValueType: core.Integer, - Description: "The time interval in minutes between subsequent transformation runs.", + Description: "The time interval in minutes between subsequent transformation runs. Used for INTERVAL schedule type.", }, "time_of_day": { ValueType: core.String, - Description: `The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00"`, + Description: `The time of the day the transformation should be launched at. Supported values are: "00:00", "01:00", "02:00", "03:00", "04:00", "05:00", "06:00", "07:00", "08:00", "09:00", "10:00", "11:00", "12:00", "13:00", "14:00", "15:00", "16:00", "17:00", "18:00", "19:00", "20:00", "21:00", "22:00", "23:00". 
Used for TIME_OF_DAY schedule type.`, }, "connection_ids": { ValueType: core.StringsSet, - Description: "Identifiers of related connectors.", + Description: "The list of the connection identifiers to be used for the integrated schedule. Not expected for QUICKSTART transformations.", }, "smart_syncing": { ValueType: core.Boolean, @@ -106,7 +106,7 @@ func transformationScheduleSchema() core.Schema { }, "cron": { ValueType: core.StringsSet, - Description: "Cron schedule: list of CRON strings.", + Description: "Cron schedule: list of CRON strings. Used for CRON schedule type.", }, }, } diff --git a/fivetran/framework/core/schema/transformation_project.go b/fivetran/framework/core/schema/transformation_project.go index 28015ef0..1ef8ad9d 100644 --- a/fivetran/framework/core/schema/transformation_project.go +++ b/fivetran/framework/core/schema/transformation_project.go @@ -81,7 +81,7 @@ func transformationProjectSchema() core.Schema { "type": { Required: true, ForceNew: true, - ValueType: core.String, + ValueType: core.StringEnum, Description: "Transformation project type.", }, "status": { diff --git a/fivetran/framework/resources/transformation.go b/fivetran/framework/resources/transformation.go index 44b5465b..48f20503 100644 --- a/fivetran/framework/resources/transformation.go +++ b/fivetran/framework/resources/transformation.go @@ -70,7 +70,7 @@ func (r *transformation) Create(ctx context.Context, req resource.CreateRequest, if transformationType != "DBT_CORE" { resp.Diagnostics.AddError( "Unable to Create Transformation Resource.", - fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "project_id"), + fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "transformation_config.project_id"), ) return } @@ -82,7 +82,7 @@ func (r *transformation) Create(ctx context.Context, req resource.CreateRequest, if transformationType != "DBT_CORE" { resp.Diagnostics.AddError( "Unable to Create Transformation Resource.", - fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "name"), + fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "transformation_config.name"), ) return } @@ -94,7 +94,7 @@ func (r *transformation) Create(ctx context.Context, req resource.CreateRequest, if transformationType != "DBT_CORE" { resp.Diagnostics.AddError( "Unable to Create Transformation Resource.", - fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "steps"), + fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "transformation_config.steps"), ) return } @@ -119,7 +119,7 @@ func (r *transformation) Create(ctx context.Context, req resource.CreateRequest, if transformationType != "QUICKSTART" { resp.Diagnostics.AddError( "Unable to Create Transformation Resource.", - fmt.Sprintf("The parameter `%v` can be set only for QUICKSTART type transformation", "package_name"), + fmt.Sprintf("The parameter `%v` can be set only for QUICKSTART type transformation", "transformation_config.package_name"), ) return } @@ -134,7 +134,7 @@ func (r *transformation) Create(ctx context.Context, req resource.CreateRequest, if transformationType != "QUICKSTART" { resp.Diagnostics.AddError( "Unable to Create Transformation Resource.", - fmt.Sprintf("The parameter `%v` can be set only for QUICKSTART type transformation", "connection_ids"), + fmt.Sprintf("The parameter `%v` can be set only for QUICKSTART type transformation", 
"transformation_config.connection_ids"), ) return } @@ -150,7 +150,7 @@ func (r *transformation) Create(ctx context.Context, req resource.CreateRequest, if len(connectionIds) == 0 && packageName == "" && transformationType == "QUICKSTART" { resp.Diagnostics.AddError( "Unable to Create Transformation Resource.", - fmt.Sprintf("For a QUICKSTART type transformation, at least one of the `%v` or `%v` parameters must be set.", "package_name", "connection_ids"), + fmt.Sprintf("For a QUICKSTART type transformation, at least one of the `%v` or `%v` parameters must be set.", "transformation_config.package_name", "transformation_config.connection_ids"), ) return } @@ -159,7 +159,7 @@ func (r *transformation) Create(ctx context.Context, req resource.CreateRequest, if transformationType != "QUICKSTART" { resp.Diagnostics.AddError( "Unable to Create Transformation Resource.", - fmt.Sprintf("The parameter `%v` can be set only for QUICKSTART type transformation", "excluded_models"), + fmt.Sprintf("The parameter `%v` can be set only for QUICKSTART type transformation", "transformation_config.excluded_models"), ) return } @@ -178,20 +178,28 @@ func (r *transformation) Create(ctx context.Context, req resource.CreateRequest, schedule := fivetran.NewTransformationSchedule() scheduleAttributes := data.Schedule.Attributes() - if !scheduleAttributes["time_of_day"].(basetypes.StringValue).IsNull() && !scheduleAttributes["time_of_day"].(basetypes.StringValue).IsUnknown() { + if !scheduleAttributes["time_of_day"].IsNull() && !scheduleAttributes["time_of_day"].IsUnknown() { schedule.TimeOfDay(scheduleAttributes["time_of_day"].(basetypes.StringValue).ValueString()) } - if !scheduleAttributes["schedule_type"].(basetypes.StringValue).IsNull() && !scheduleAttributes["schedule_type"].(basetypes.StringValue).IsUnknown() { + if !scheduleAttributes["schedule_type"].IsNull() && !scheduleAttributes["schedule_type"].IsUnknown() { schedule.ScheduleType(scheduleAttributes["schedule_type"].(basetypes.StringValue).ValueString()) } - if !scheduleAttributes["interval"].(basetypes.Int64Value).IsNull() && !scheduleAttributes["interval"].(basetypes.Int64Value).IsUnknown() { + if !scheduleAttributes["interval"].IsNull() && !scheduleAttributes["interval"].IsUnknown() { schedule.Interval(int(scheduleAttributes["interval"].(basetypes.Int64Value).ValueInt64())) } - if !scheduleAttributes["smart_syncing"].(basetypes.BoolValue).IsNull() && !scheduleAttributes["smart_syncing"].(basetypes.BoolValue).IsUnknown() { + if !scheduleAttributes["smart_syncing"].IsNull() && !scheduleAttributes["smart_syncing"].IsUnknown() { schedule.SmartSyncing(scheduleAttributes["smart_syncing"].(basetypes.BoolValue).ValueBool()) } if !scheduleAttributes["connection_ids"].IsUnknown() && !scheduleAttributes["connection_ids"].IsNull() { + if transformationType != "DBT_CORE" { + resp.Diagnostics.AddError( + "Unable to Update Transformation Resource.", + fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "schedule.connection_ids"), + ) + return + } + evars := []string{} for _, ev := range scheduleAttributes["connection_ids"].(basetypes.SetValue).Elements() { evars = append(evars, ev.(basetypes.StringValue).ValueString()) @@ -299,36 +307,49 @@ func (r *transformation) Update(ctx context.Context, req resource.UpdateRequest, return } - svc := r.GetClient().NewTransformationUpdate() - svc.Paused(plan.Paused.ValueBool()) + svc := r.GetClient().NewTransformationUpdate().TransformationId(state.Id.ValueString()) - if !plan.Config.IsNull() && 
!plan.Config.IsUnknown() { + pausedPlan := core.GetBoolOrDefault(plan.Paused, true) + pausedState := core.GetBoolOrDefault(state.Paused, true) + + if pausedPlan != pausedState { + svc.Paused(pausedPlan) + } + + if !plan.Config.IsNull() && !plan.Config.IsUnknown() && !plan.Config.Equal(state.Config) { + hasChanges := false config := fivetran.NewTransformationConfig() - configAttributes := plan.Config.Attributes() + configPlanAttributes := plan.Config.Attributes() + configStateAttributes := state.Config.Attributes() - if !configAttributes["name"].(basetypes.StringValue).IsNull() && !configAttributes["name"].(basetypes.StringValue).IsUnknown() { + if !configPlanAttributes["name"].IsNull() && + !configPlanAttributes["name"].IsUnknown() && + !configStateAttributes["name"].(basetypes.StringValue).Equal(configPlanAttributes["name"].(basetypes.StringValue)) { if state.ProjectType.ValueString() != "DBT_CORE" { resp.Diagnostics.AddError( - "Unable to Create Transformation Resource.", - fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "name"), + "Unable to Update Transformation Resource.", + fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "transformation_config.name"), ) return } - config.Name(configAttributes["name"].(basetypes.StringValue).ValueString()) + hasChanges = true + config.Name(configPlanAttributes["name"].(basetypes.StringValue).ValueString()) } - if !configAttributes["steps"].IsUnknown() && !configAttributes["steps"].IsNull() { + if !configPlanAttributes["steps"].IsUnknown() && + !configPlanAttributes["steps"].IsNull() && + !configStateAttributes["steps"].(basetypes.ListValue).Equal(configPlanAttributes["steps"].(basetypes.ListValue)) { if state.ProjectType.ValueString() != "DBT_CORE" { resp.Diagnostics.AddError( - "Unable to Create Transformation Resource.", - fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "steps"), + "Unable to Update Transformation Resource.", + fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "transformation_config.steps"), ) return } evars := []transformations.TransformationStep{} - for _, ev := range configAttributes["steps"].(basetypes.ListValue).Elements() { + for _, ev := range configPlanAttributes["steps"].(basetypes.ListValue).Elements() { if element, ok := ev.(basetypes.ObjectValue); ok { var step transformations.TransformationStep step.Name = element.Attributes()["name"].(basetypes.StringValue).ValueString() @@ -336,70 +357,114 @@ func (r *transformation) Update(ctx context.Context, req resource.UpdateRequest, evars = append(evars, step) } } + + hasChanges = true config.Steps(evars) } - if !configAttributes["excluded_models"].IsUnknown() && !configAttributes["excluded_models"].IsNull() { + if !configPlanAttributes["excluded_models"].IsUnknown() && + !configPlanAttributes["excluded_models"].IsNull() && + !configStateAttributes["excluded_models"].(basetypes.SetValue).Equal(configPlanAttributes["excluded_models"].(basetypes.SetValue)) { if state.ProjectType.ValueString() != "QUICKSTART" { resp.Diagnostics.AddError( - "Unable to Create Transformation Resource.", - fmt.Sprintf("The parameter `%v` can be set only for QUICKSTART type transformation", "excluded_models"), + "Unable to Update Transformation Resource.", + fmt.Sprintf("The parameter `%v` can be set only for QUICKSTART type transformation", "transformation_config.excluded_models"), ) return } evars := []string{} - for _, ev := range 
configAttributes["excluded_models"].(basetypes.SetValue).Elements() { + for _, ev := range configPlanAttributes["excluded_models"].(basetypes.SetValue).Elements() { evars = append(evars, ev.(basetypes.StringValue).ValueString()) } + + hasChanges = true config.ExcludedModels(evars) } - svc.TransformationConfig(config) + if hasChanges { + svc.TransformationConfig(config) + } } - if !plan.Schedule.IsNull() && !plan.Schedule.IsUnknown() { + if !plan.Schedule.IsNull() && !plan.Schedule.IsUnknown() && !plan.Schedule.Equal(state.Schedule) { + hasChanges := false schedule := fivetran.NewTransformationSchedule() - scheduleAttributes := plan.Schedule.Attributes() - - if !scheduleAttributes["time_of_day"].(basetypes.StringValue).IsNull() && !scheduleAttributes["time_of_day"].(basetypes.StringValue).IsUnknown() { - schedule.TimeOfDay(scheduleAttributes["time_of_day"].(basetypes.StringValue).ValueString()) + schedulePlanAttributes := plan.Schedule.Attributes() + scheduleStateAttributes := state.Schedule.Attributes() + + if !schedulePlanAttributes["time_of_day"].IsNull() && + !schedulePlanAttributes["time_of_day"].IsUnknown() && + !scheduleStateAttributes["time_of_day"].(basetypes.StringValue).Equal(schedulePlanAttributes["time_of_day"].(basetypes.StringValue)) { + hasChanges = true + schedule.TimeOfDay(schedulePlanAttributes["time_of_day"].(basetypes.StringValue).ValueString()) } - if !scheduleAttributes["schedule_type"].(basetypes.StringValue).IsNull() && !scheduleAttributes["schedule_type"].(basetypes.StringValue).IsUnknown() { - schedule.ScheduleType(scheduleAttributes["schedule_type"].(basetypes.StringValue).ValueString()) + + if !schedulePlanAttributes["schedule_type"].IsNull() && + !schedulePlanAttributes["schedule_type"].IsUnknown() && + !scheduleStateAttributes["schedule_type"].(basetypes.StringValue).Equal(schedulePlanAttributes["schedule_type"].(basetypes.StringValue)) { + hasChanges = true + schedule.ScheduleType(schedulePlanAttributes["schedule_type"].(basetypes.StringValue).ValueString()) } - if !scheduleAttributes["interval"].(basetypes.Int64Value).IsNull() && !scheduleAttributes["interval"].(basetypes.Int64Value).IsUnknown() { - schedule.Interval(int(scheduleAttributes["interval"].(basetypes.Int64Value).ValueInt64())) + + if !schedulePlanAttributes["interval"].IsNull() && + !schedulePlanAttributes["interval"].IsUnknown() && + !scheduleStateAttributes["interval"].(basetypes.Int64Value).Equal(schedulePlanAttributes["interval"].(basetypes.Int64Value)) { + hasChanges = true + schedule.Interval(int(schedulePlanAttributes["interval"].(basetypes.Int64Value).ValueInt64())) } - if !scheduleAttributes["smart_syncing"].(basetypes.BoolValue).IsNull() && !scheduleAttributes["smart_syncing"].(basetypes.BoolValue).IsUnknown() { - schedule.SmartSyncing(scheduleAttributes["smart_syncing"].(basetypes.BoolValue).ValueBool()) + + if !schedulePlanAttributes["smart_syncing"].IsNull() && + !schedulePlanAttributes["smart_syncing"].IsUnknown() && + !scheduleStateAttributes["smart_syncing"].(basetypes.BoolValue).Equal(schedulePlanAttributes["smart_syncing"].(basetypes.BoolValue)) { + hasChanges = true + schedule.SmartSyncing(schedulePlanAttributes["smart_syncing"].(basetypes.BoolValue).ValueBool()) } - if !scheduleAttributes["connection_ids"].IsUnknown() && !scheduleAttributes["connection_ids"].IsNull() { + if !schedulePlanAttributes["connection_ids"].IsUnknown() && + !schedulePlanAttributes["connection_ids"].IsNull() && + 
!scheduleStateAttributes["connection_ids"].(basetypes.SetValue).Equal(schedulePlanAttributes["connection_ids"].(basetypes.SetValue)) { + if plan.ProjectType.ValueString() != "DBT_CORE" { + resp.Diagnostics.AddError( + "Unable to Update Transformation Resource.", + fmt.Sprintf("The parameter `%v` can be set only for DBT_CORE type transformation", "schedule.connection_ids"), + ) + return + } + evars := []string{} - for _, ev := range scheduleAttributes["connection_ids"].(basetypes.SetValue).Elements() { + for _, ev := range schedulePlanAttributes["connection_ids"].(basetypes.SetValue).Elements() { evars = append(evars, ev.(basetypes.StringValue).ValueString()) } + hasChanges = true schedule.ConnectionIds(evars) } - if !scheduleAttributes["days_of_week"].IsUnknown() && !scheduleAttributes["days_of_week"].IsNull() { + if !schedulePlanAttributes["days_of_week"].IsUnknown() && + !schedulePlanAttributes["days_of_week"].IsNull() && + !scheduleStateAttributes["days_of_week"].(basetypes.SetValue).Equal(schedulePlanAttributes["days_of_week"].(basetypes.SetValue)) { evars := []string{} - for _, ev := range scheduleAttributes["days_of_week"].(basetypes.SetValue).Elements() { + for _, ev := range schedulePlanAttributes["days_of_week"].(basetypes.SetValue).Elements() { evars = append(evars, ev.(basetypes.StringValue).ValueString()) } + hasChanges = true schedule.DaysOfWeek(evars) } - if !scheduleAttributes["cron"].IsUnknown() && !scheduleAttributes["cron"].IsNull() { + if !schedulePlanAttributes["cron"].IsUnknown() && + !schedulePlanAttributes["cron"].IsNull() && + !scheduleStateAttributes["cron"].(basetypes.SetValue).Equal(schedulePlanAttributes["cron"].(basetypes.SetValue)) { evars := []string{} - for _, ev := range scheduleAttributes["cron"].(basetypes.SetValue).Elements() { + for _, ev := range schedulePlanAttributes["cron"].(basetypes.SetValue).Elements() { evars = append(evars, ev.(basetypes.StringValue).ValueString()) } + hasChanges = true schedule.Cron(evars) } - svc.TransformationSchedule(schedule) + if hasChanges { + svc.TransformationSchedule(schedule) + } } updateResponse, err := svc.Do(ctx) diff --git a/fivetran/framework/resources/transformation_project.go b/fivetran/framework/resources/transformation_project.go index 9850ed59..dc4c91a6 100644 --- a/fivetran/framework/resources/transformation_project.go +++ b/fivetran/framework/resources/transformation_project.go @@ -166,24 +166,64 @@ func (r *transformationProject) Update(ctx context.Context, req resource.UpdateR svc := r.GetClient().NewTransformationProjectUpdate() svc.ProjectId(state.Id.ValueString()) - svc.RunTests(plan.RunTests.ValueBool()) + + runTestsPlan := core.GetBoolOrDefault(plan.RunTests, true) + runTestsState := core.GetBoolOrDefault(state.RunTests, true) + + if runTestsPlan != runTestsState { + svc.RunTests(runTestsPlan) + } if !plan.ProjectConfig.IsUnknown() && !state.ProjectConfig.Equal(plan.ProjectConfig) { + hasChanges := false projectConfig := fivetran.NewTransformationProjectConfig() - projectConfigAttributes := plan.ProjectConfig.Attributes() - projectConfig.FolderPath(projectConfigAttributes["folder_path"].(basetypes.StringValue).ValueString()) - projectConfig.GitBranch(projectConfigAttributes["git_branch"].(basetypes.StringValue).ValueString()) - projectConfig.TargetName(projectConfigAttributes["target_name"].(basetypes.StringValue).ValueString()) - projectConfig.Threads(int(projectConfigAttributes["threads"].(basetypes.Int64Value).ValueInt64())) + configPlanAttributes := plan.ProjectConfig.Attributes() + 
configStateAttributes := state.ProjectConfig.Attributes() + + fmt.Printf("configPlanAttributes %v\n", configPlanAttributes) + fmt.Printf("configStateAttributes %v\n", configStateAttributes) + if !configPlanAttributes["folder_path"].IsNull() && + !configPlanAttributes["folder_path"].IsUnknown() && + !configStateAttributes["folder_path"].(basetypes.StringValue).Equal(configPlanAttributes["folder_path"].(basetypes.StringValue)) { + hasChanges = true + projectConfig.FolderPath(configPlanAttributes["folder_path"].(basetypes.StringValue).ValueString()) + } - if !projectConfigAttributes["environment_vars"].IsUnknown() && !projectConfigAttributes["environment_vars"].IsNull() { + if !configPlanAttributes["git_branch"].IsNull() && + !configPlanAttributes["git_branch"].IsUnknown() && + !configStateAttributes["git_branch"].(basetypes.StringValue).Equal(configPlanAttributes["git_branch"].(basetypes.StringValue)) { + hasChanges = true + projectConfig.GitBranch(configPlanAttributes["git_branch"].(basetypes.StringValue).ValueString()) + } + + if !configPlanAttributes["target_name"].IsNull() && + !configPlanAttributes["target_name"].IsUnknown() && + !configStateAttributes["target_name"].(basetypes.StringValue).Equal(configPlanAttributes["target_name"].(basetypes.StringValue)) { + hasChanges = true + projectConfig.TargetName(configPlanAttributes["target_name"].(basetypes.StringValue).ValueString()) + } + + if !configPlanAttributes["threads"].IsNull() && + !configPlanAttributes["threads"].IsUnknown() && + !configStateAttributes["threads"].(basetypes.Int64Value).Equal(configPlanAttributes["threads"].(basetypes.Int64Value)) { + hasChanges = true + projectConfig.Threads(int(configPlanAttributes["threads"].(basetypes.Int64Value).ValueInt64())) + } + + if !configPlanAttributes["environment_vars"].IsNull() && + !configPlanAttributes["environment_vars"].IsUnknown() && + !configStateAttributes["environment_vars"].(basetypes.SetValue).Equal(configPlanAttributes["environment_vars"].(basetypes.SetValue)) { evars := []string{} - for _, ev := range projectConfigAttributes["environment_vars"].(basetypes.SetValue).Elements() { + for _, ev := range configPlanAttributes["environment_vars"].(basetypes.SetValue).Elements() { evars = append(evars, ev.(basetypes.StringValue).ValueString()) } + hasChanges = true projectConfig.EnvironmentVars(evars) } - svc.ProjectConfig(projectConfig) + + if hasChanges { + svc.ProjectConfig(projectConfig) + } } projectResponse, err := svc.Do(ctx) diff --git a/fivetran/framework/resources/transformation_project_test.go b/fivetran/framework/resources/transformation_project_test.go index d2cf4d8e..98502b83 100644 --- a/fivetran/framework/resources/transformation_project_test.go +++ b/fivetran/framework/resources/transformation_project_test.go @@ -13,6 +13,7 @@ import ( var ( transformationProjectResourceMockGetHandler *mock.Handler transformationProjectResourceMockPostHandler *mock.Handler + transformationProjectResourceMockPatchHandler *mock.Handler transformationProjectResourceMockDeleteHandler *mock.Handler transformationProjectResourceMockData map[string]interface{} @@ -20,6 +21,39 @@ var ( func setupMockClientTransformationProjectResourceMappingTest(t *testing.T) { transformationProjectResponse := ` +{ + "id": "project_id", + "type": "DBT_GIT", + "status": "NOT_READY", + "errors": [ + "string" + ], + "created_at": "created_at", + "group_id": "group_id", + "setup_tests": [ + { + "title": "Test Title", + "status": "FAILED", + "message": "Error message", + "details": "Error details" + } + ], + 
"created_by_id": "created_by_id", + "project_config": { + "dbt_version": "dbt_version", + "default_schema": "default_schema", + "git_remote_url": "git_remote_url", + "git_branch": "git_branch", + "threads": 0, + "target_name": "target_name", + "environment_vars": [ + "environment_var" + ], + "public_key": "public_key" + } + }` + + transformationProjectPatchedResponse := ` { "id": "project_id", "type": "DBT_GIT", @@ -52,6 +86,7 @@ func setupMockClientTransformationProjectResourceMappingTest(t *testing.T) { "public_key": "public_key" } }` + tfmock.MockClient().Reset() transformationProjectResourceMockGetHandler = tfmock.MockClient().When(http.MethodGet, "/v1/transformation-projects/project_id").ThenCall( @@ -70,7 +105,6 @@ func setupMockClientTransformationProjectResourceMappingTest(t *testing.T) { config := body["project_config"].(map[string]interface{}) tfmock.AssertKeyExistsAndHasValue(t, config, "git_remote_url", "git_remote_url") tfmock.AssertKeyExistsAndHasValue(t, config, "git_branch", "git_branch") - tfmock.AssertKeyExistsAndHasValue(t, config, "folder_path", "folder_path") tfmock.AssertKeyExistsAndHasValue(t, config, "dbt_version", "dbt_version") tfmock.AssertKeyExistsAndHasValue(t, config, "default_schema", "default_schema") tfmock.AssertKeyExistsAndHasValue(t, config, "target_name", "target_name") @@ -81,6 +115,27 @@ func setupMockClientTransformationProjectResourceMappingTest(t *testing.T) { }, ) + transformationProjectResourceMockPatchHandler = tfmock.MockClient().When(http.MethodPatch, "/v1/transformation-projects/project_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + body := tfmock.RequestBodyToJson(t, req) + + tfmock.AssertKeyDoesNotExist(t, body, "group_id") + tfmock.AssertKeyDoesNotExist(t, body, "type") + + tfmock.AssertKeyExists(t, body, "project_config") + config := body["project_config"].(map[string]interface{}) + tfmock.AssertKeyDoesNotExist(t, config, "git_remote_url") + tfmock.AssertKeyDoesNotExist(t, config, "git_branch") + tfmock.AssertKeyExistsAndHasValue(t, config, "folder_path", "folder_path") + tfmock.AssertKeyDoesNotExist(t, config, "dbt_version") + tfmock.AssertKeyDoesNotExist(t, config, "default_schema") + tfmock.AssertKeyDoesNotExist(t, config, "target_name") + + transformationProjectResourceMockData = tfmock.CreateMapFromJsonString(t, transformationProjectPatchedResponse) + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", transformationProjectResourceMockData), nil + }, + ) + transformationProjectResourceMockDeleteHandler = tfmock.MockClient().When(http.MethodDelete, "/v1/transformation-projects/project_id", ).ThenCall( @@ -103,7 +158,6 @@ func TestResourceTransformationProjectMappingMock(t *testing.T) { project_config { git_remote_url = "git_remote_url" git_branch = "git_branch" - folder_path = "folder_path" dbt_version = "dbt_version" default_schema = "default_schema" threads = 0 @@ -131,6 +185,49 @@ func TestResourceTransformationProjectMappingMock(t *testing.T) { resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.environment_vars.0", "environment_var"), resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_remote_url", "git_remote_url"), resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_branch", "git_branch"), + ), + } + + step2 := resource.TestStep{ + Config: ` + resource "fivetran_transformation_project" "project" { + provider = fivetran-provider + group_id = "group_id" + type = 
"DBT_GIT" + run_tests = true + + project_config { + git_remote_url = "git_remote_url" + git_branch = "git_branch" + folder_path = "folder_path" + dbt_version = "dbt_version" + default_schema = "default_schema" + threads = 0 + target_name = "target_name" + environment_vars = ["environment_var"] + } + }`, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationProjectResourceMockPostHandler.Interactions, 1) + tfmock.AssertEqual(t, transformationProjectResourceMockPatchHandler.Interactions, 1) + tfmock.AssertEqual(t, transformationProjectResourceMockGetHandler.Interactions, 2) + tfmock.AssertNotEmpty(t, transformationProjectResourceMockData) + return nil + }, + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "id", "project_id"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "group_id", "group_id"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_at", "created_at"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "created_by_id", "created_by_id"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "type", "DBT_GIT"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.dbt_version", "dbt_version"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.public_key", "public_key"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.default_schema", "default_schema"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.target_name", "target_name"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.environment_vars.0", "environment_var"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_remote_url", "git_remote_url"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_branch", "git_branch"), resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.folder_path", "folder_path"), ), } @@ -147,6 +244,7 @@ func TestResourceTransformationProjectMappingMock(t *testing.T) { }, Steps: []resource.TestStep{ step1, + step2, }, }, ) diff --git a/fivetran/framework/resources/transformation_test.go b/fivetran/framework/resources/transformation_test.go index 62fcfc0c..55e2e1b4 100644 --- a/fivetran/framework/resources/transformation_test.go +++ b/fivetran/framework/resources/transformation_test.go @@ -12,6 +12,8 @@ import ( var ( transformationGitPostHandler *mock.Handler + transformationGitPatchHandler *mock.Handler + transformationQuickstartPatchHandler *mock.Handler transformationQuickstartPostHandler *mock.Handler transformationGitData map[string]interface{} transformationQuickstartData map[string]interface{} @@ -63,7 +65,7 @@ var ( } }` - quickstartResponse = `{ + gitPatchedResponse = `{ "id": "transformation_id", "status": "status", "schedule": { @@ -83,6 +85,83 @@ var ( ], "time_of_day": "time_of_day1" }, + "type": "DBT_CORE", + "paused": true, + "created_at": "created_at", + "output_model_names": [ + "output_model_name1", + "output_model_name2" + ], + "created_by_id": "created_by_id", + "transformation_config": { + "project_id": "project_id", + "name": "name2", + "steps": [ + { + "name": "name1", + "command": "command1" + }, + { + "name": "name3", + "command": "command3" + } + 
] + } + }` + + quickstartResponse = `{ + "id": "transformation_id", + "status": "status", + "schedule": { + "cron": [ + "cron1","cron2" + ], + "interval": 601, + "smart_syncing": true, + "schedule_type": "schedule_type1", + "days_of_week": [ + "days_of_week1", + "days_of_week2" + ], + "time_of_day": "time_of_day1" + }, + "type": "QUICKSTART", + "paused": true, + "created_at": "created_at", + "output_model_names": [ + "output_model_name1", + "output_model_name2" + ], + "created_by_id": "created_by_id", + "transformation_config": { + "package_name": "package_name", + "connection_ids": [ + "connection_id1", + "connection_id2" + ], + "excluded_models": [ + "excluded_model1","excluded_model2" + ], + "upgrade_available": true + } + }` + + quickstartPatchedResponse = `{ + "id": "transformation_id", + "status": "status", + "schedule": { + "cron": [ + "cron1","cron2" + ], + "interval": 601, + "smart_syncing": true, + "schedule_type": "schedule_type1", + "days_of_week": [ + "days_of_week1", + "days_of_week2" + ], + "time_of_day": "14:00" + }, "type": "QUICKSTART", "paused": true, "created_at": "created_at", @@ -152,6 +231,29 @@ func setupMockClientTransformationGitResource(t *testing.T) { }, ) + transformationGitPatchHandler = tfmock.MockClient().When(http.MethodPatch, "/v1/transformations/transformation_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + body := tfmock.RequestBodyToJson(t, req) + tfmock.AssertKeyDoesNotExist(t, body, "type") + tfmock.AssertKeyDoesNotExist(t, body, "paused") + + tfmock.AssertKeyExists(t, body, "transformation_config") + config := body["transformation_config"].(map[string]interface{}) + tfmock.AssertKeyDoesNotExist(t, config, "project_id") + tfmock.AssertKeyExistsAndHasValue(t, config, "name", "name2") + steps := config["steps"].([]interface{}) + tfmock.AssertKeyExistsAndHasValue(t, steps[0].(map[string]interface{}), "name", "name1") + tfmock.AssertKeyExistsAndHasValue(t, steps[0].(map[string]interface{}), "command", "command1") + tfmock.AssertKeyExistsAndHasValue(t, steps[1].(map[string]interface{}), "name", "name3") + tfmock.AssertKeyExistsAndHasValue(t, steps[1].(map[string]interface{}), "command", "command3") + + tfmock.AssertKeyDoesNotExist(t, body, "schedule") + + transformationGitData = tfmock.CreateMapFromJsonString(t, gitPatchedResponse) + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", transformationGitData), nil + }, + ) + tfmock.MockClient().When(http.MethodGet, "/v1/transformations/transformation_id").ThenCall( func(req *http.Request) (*http.Response, error) { tfmock.AssertNotEmpty(t, transformationGitData) @@ -203,11 +305,6 @@ func setupMockClientTransformationQuickstartResource(t *testing.T) { tfmock.AssertEqual(t, cron[0], "cron1") tfmock.AssertEqual(t, cron[1], "cron2") - connectionIds = schedule["connection_ids"].([]interface{}) - tfmock.AssertEqual(t, len(connectionIds), 2) - tfmock.AssertEqual(t, connectionIds[0], "connection_id1") - tfmock.AssertEqual(t, connectionIds[1], "connection_id2") - daysOfWeek := schedule["days_of_week"].([]interface{}) tfmock.AssertEqual(t, len(daysOfWeek), 2) tfmock.AssertEqual(t, daysOfWeek[0], "days_of_week1") @@ -218,6 +315,28 @@ func setupMockClientTransformationQuickstartResource(t *testing.T) { }, ) + transformationQuickstartPatchHandler = tfmock.MockClient().When(http.MethodPatch, "/v1/transformations/transformation_id").ThenCall( + func(req *http.Request) (*http.Response, error) { + body := tfmock.RequestBodyToJson(t, req) + tfmock.AssertKeyDoesNotExist(t, body, "type") 
+ tfmock.AssertKeyDoesNotExist(t, body, "paused") + + tfmock.AssertKeyDoesNotExist(t, body, "transformation_config") + + tfmock.AssertKeyExists(t, body, "schedule") + schedule := body["schedule"].(map[string]interface{}) + tfmock.AssertKeyDoesNotExist(t, schedule, "interval") + tfmock.AssertKeyDoesNotExist(t, schedule, "smart_syncing") + tfmock.AssertKeyDoesNotExist(t, schedule, "schedule_type") + tfmock.AssertKeyExists(t, schedule, "time_of_day") + tfmock.AssertKeyDoesNotExist(t, schedule, "cron") + tfmock.AssertKeyDoesNotExist(t, schedule, "days_of_week") + + transformationQuickstartData = tfmock.CreateMapFromJsonString(t, quickstartPatchedResponse) + return tfmock.FivetranSuccessResponse(t, req, http.StatusOK, "Success", transformationQuickstartData), nil + }, + ) + tfmock.MockClient().When(http.MethodGet, "/v1/transformations/transformation_id").ThenCall( func(req *http.Request) (*http.Response, error) { tfmock.AssertNotEmpty(t, transformationQuickstartData) @@ -304,6 +423,73 @@ func TestResourceTransformationGitMock(t *testing.T) { ), } + step2 := resource.TestStep{ + Config: ` + resource "fivetran_transformation" "transformation" { + provider = fivetran-provider + + type = "DBT_CORE" + paused = true + + schedule { + cron = ["cron1","cron2"] + interval = 601 + smart_syncing = true + connection_ids = ["connection_id1", "connection_id2"] + schedule_type = "schedule_type1" + days_of_week = ["days_of_week1","days_of_week2"] + time_of_day = "time_of_day1" + } + + transformation_config { + project_id = "project_id" + name = "name2" + steps = [ + { + name = "name1" + command = "command1" + }, + { + name = "name3" + command = "command3" + } + ] + } + } + `, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationGitPostHandler.Interactions, 1) + return nil + }, + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "id", "transformation_id"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "status", "status"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "created_at", "created_at"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "created_by_id", "created_by_id"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "type", "DBT_CORE"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "paused", "true"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "output_model_names.0", "output_model_name1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "output_model_names.1", "output_model_name2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.project_id", "project_id"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.name", "name2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.steps.0.name", "name1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.steps.0.command", "command1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.steps.1.name", "name3"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.steps.1.command", "command3"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.smart_syncing", 
"true"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.interval", "601"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.schedule_type", "schedule_type1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.cron.0", "cron1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.cron.1", "cron2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.connection_ids.0", "connection_id1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.connection_ids.1", "connection_id2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.days_of_week.0", "days_of_week1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.days_of_week.1", "days_of_week2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.time_of_day", "time_of_day1"), + ), + } + resource.Test( t, resource.TestCase{ @@ -319,6 +505,7 @@ func TestResourceTransformationGitMock(t *testing.T) { Steps: []resource.TestStep{ step1, + step2, }, }, ) @@ -337,7 +524,6 @@ func TestResourceTransformationQuickstartMock(t *testing.T) { cron = ["cron1","cron2"] interval = 601 smart_syncing = true - connection_ids = ["connection_id1", "connection_id2"] schedule_type = "schedule_type1" days_of_week = ["days_of_week1","days_of_week2"] time_of_day = "time_of_day1" @@ -375,14 +561,67 @@ func TestResourceTransformationQuickstartMock(t *testing.T) { resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.schedule_type", "schedule_type1"), resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.cron.0", "cron1"), resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.cron.1", "cron2"), - resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.connection_ids.0", "connection_id1"), - resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.connection_ids.1", "connection_id2"), resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.days_of_week.0", "days_of_week1"), resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.days_of_week.1", "days_of_week2"), resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.time_of_day", "time_of_day1"), ), } + step2 := resource.TestStep{ + Config: ` + resource "fivetran_transformation" "transformation" { + provider = fivetran-provider + + type = "QUICKSTART" + paused = true + + schedule { + cron = ["cron1","cron2"] + interval = 601 + smart_syncing = true + schedule_type = "schedule_type1" + days_of_week = ["days_of_week1","days_of_week2"] + time_of_day = "14:00" + } + + transformation_config { + package_name = "package_name" + connection_ids = ["connection_id1", "connection_id2"] + excluded_models = ["excluded_model1", "excluded_model2"] + } + } + `, + + Check: resource.ComposeAggregateTestCheckFunc( + func(s *terraform.State) error { + tfmock.AssertEqual(t, transformationQuickstartPostHandler.Interactions, 1) + return nil + }, + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "id", "transformation_id"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "status", "status"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", 
"created_at", "created_at"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "created_by_id", "created_by_id"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "type", "QUICKSTART"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "paused", "true"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "output_model_names.0", "output_model_name1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "output_model_names.1", "output_model_name2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.package_name", "package_name"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.connection_ids.0", "connection_id1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.connection_ids.1", "connection_id2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.excluded_models.0", "excluded_model1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.excluded_models.1", "excluded_model2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "transformation_config.upgrade_available", "true"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.smart_syncing", "true"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.interval", "601"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.schedule_type", "schedule_type1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.cron.0", "cron1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.cron.1", "cron2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.days_of_week.0", "days_of_week1"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.days_of_week.1", "days_of_week2"), + resource.TestCheckResourceAttr("fivetran_transformation.transformation", "schedule.time_of_day", "14:00"), + ), + } + resource.Test( t, resource.TestCase{ @@ -397,6 +636,7 @@ func TestResourceTransformationQuickstartMock(t *testing.T) { Steps: []resource.TestStep{ step1, + step2, }, }, ) diff --git a/templates/resources/transformation.md.tmpl b/templates/resources/transformation.md.tmpl index d3680971..dab933c6 100644 --- a/templates/resources/transformation.md.tmpl +++ b/templates/resources/transformation.md.tmpl @@ -18,13 +18,13 @@ resource "fivetran_transformation" "transformation" { paused = true schedule { - cron = ["cron1","cron2"] + cron = ["0 */1 * * *"] interval = 601 smart_syncing = true connection_ids = ["connection_id1", "connection_id2"] - schedule_type = "schedule_type1" - days_of_week = ["days_of_week1","days_of_week2"] - time_of_day = "time_of_day1" + schedule_type = "INTEGRATED" + days_of_week = ["MONDAY", "FRIDAY"] + time_of_day = "14:00" } transformation_config { @@ -55,7 +55,6 @@ resource "fivetran_transformation" "transformation" { schedule { schedule_type = "TIME_OF_DAY" - days_of_week = ["MONDAY", "FRIDAY"] time_of_day = "11:00" } @@ -67,6 +66,37 @@ resource "fivetran_transformation" "transformation" { } ``` +## Example Usages for Transformation Schedule section + +```hcl +schedule { + schedule_type = "TIME_OF_DAY" + days_of_week = ["MONDAY", 
"FRIDAY"] + time_of_day = "11:00" +} +``` + +```hcl +schedule { + schedule_type = "INTEGRATED" + connection_ids = ["connection_id1", "connection_id2"] +} +``` + +```hcl +schedule { + schedule_type = "INTERVAL" + interval = 601 +} +``` + +```hcl +schedule { + schedule_type = "CRON" + cron = ["0 */1 * * *"] +} +``` + {{ .SchemaMarkdown | trimspace }} ## Import From 965a61357e914efc0600ccebfd19f08623eceb22 Mon Sep 17 00:00:00 2001 From: Aleksandr Boldyrev Date: Tue, 28 Jan 2025 13:37:03 +0100 Subject: [PATCH 12/13] address comments --- docs/resources/transformation.md | 9 +-- docs/resources/transformation_project.md | 4 +- .../framework/core/model/transformation.go | 38 +++++++++--- .../framework/resources/transformation.go | 26 ++++---- .../resources/transformation_project.go | 61 +++++++++++++------ .../resources/transformation_project_test.go | 29 +++++---- .../resources/transformation_test.go | 1 - templates/resources/transformation.md.tmpl | 9 +-- .../resources/transformation_project.md.tmpl | 4 +- 9 files changed, 108 insertions(+), 73 deletions(-) diff --git a/docs/resources/transformation.md b/docs/resources/transformation.md index 812b96c5..7a703098 100644 --- a/docs/resources/transformation.md +++ b/docs/resources/transformation.md @@ -18,13 +18,8 @@ resource "fivetran_transformation" "transformation" { paused = true schedule { - cron = ["0 */1 * * *"] - interval = 601 - smart_syncing = true - connection_ids = ["connection_id1", "connection_id2"] - schedule_type = "INTEGRATED" - days_of_week = ["MONDAY", "FRIDAY"] - time_of_day = "14:00" + schedule_type = "TIME_OF_DAY" + time_of_day = "11:00" } transformation_config { diff --git a/docs/resources/transformation_project.md b/docs/resources/transformation_project.md index 51981d4b..ce29c1d8 100644 --- a/docs/resources/transformation_project.md +++ b/docs/resources/transformation_project.md @@ -119,9 +119,9 @@ resource "fivetran_transformation_project" "project" { folder_path = "folder_path" dbt_version = "dbt_version" default_schema = "default_schema" - threads = 0 + threads = 1 target_name = "target_name" - environment_vars = ["environment_var"] + environment_vars = ["DBT_VARIABLE=variable_value"] } } ``` diff --git a/fivetran/framework/core/model/transformation.go b/fivetran/framework/core/model/transformation.go index c46061f8..9b28ef47 100644 --- a/fivetran/framework/core/model/transformation.go +++ b/fivetran/framework/core/model/transformation.go @@ -2,6 +2,7 @@ package model import ( "context" + "fmt" sdk "github.com/fivetran/go-fivetran/transformations" "github.com/hashicorp/terraform-plugin-framework/attr" @@ -71,19 +72,31 @@ func (d *Transformation) ReadFromResponse(ctx context.Context, resp sdk.Transfor if resp.Data.TransformationSchedule.ScheduleType == "INTERVAL" || resp.Data.TransformationSchedule.Interval > 0 { scheduleAttrValues["interval"] = types.Int64Value(int64(resp.Data.TransformationSchedule.Interval)) } else { - scheduleAttrValues["interval"] = types.Int64Null() + if !d.Schedule.Attributes()["interval"].IsUnknown() { + scheduleAttrValues["interval"] = d.Schedule.Attributes()["interval"] + } else { + scheduleAttrValues["interval"] = types.Int64Null() + } } if resp.Data.TransformationSchedule.TimeOfDay != "" { scheduleAttrValues["time_of_day"] = types.StringValue(resp.Data.TransformationSchedule.TimeOfDay) } else { - scheduleAttrValues["time_of_day"] = types.StringNull() + if !d.Schedule.Attributes()["time_of_day"].IsUnknown() { + scheduleAttrValues["time_of_day"] = d.Schedule.Attributes()["time_of_day"] + } else { 
+ scheduleAttrValues["time_of_day"] = types.StringNull() + } } if resp.Data.TransformationSchedule.ScheduleType != "" { scheduleAttrValues["schedule_type"] = types.StringValue(resp.Data.TransformationSchedule.ScheduleType) } else { - scheduleAttrValues["schedule_type"] = types.StringNull() + if !d.Schedule.Attributes()["schedule_type"].IsUnknown() { + scheduleAttrValues["schedule_type"] = d.Schedule.Attributes()["schedule_type"] + } else { + scheduleAttrValues["schedule_type"] = types.StringNull() + } } if resp.Data.TransformationSchedule.Cron != nil { @@ -97,7 +110,11 @@ func (d *Transformation) ReadFromResponse(ctx context.Context, resp sdk.Transfor scheduleAttrValues["cron"] = types.SetNull(types.StringType) } } else { - scheduleAttrValues["cron"] = types.SetNull(types.StringType) + if !d.Schedule.Attributes()["cron"].IsUnknown() { + scheduleAttrValues["cron"] = d.Schedule.Attributes()["cron"] + } else { + scheduleAttrValues["cron"] = types.SetNull(types.StringType) + } } if resp.Data.TransformationSchedule.ConnectionIds != nil { @@ -111,7 +128,11 @@ func (d *Transformation) ReadFromResponse(ctx context.Context, resp sdk.Transfor scheduleAttrValues["connection_ids"] = types.SetNull(types.StringType) } } else { - scheduleAttrValues["connection_ids"] = types.SetNull(types.StringType) + if !d.Schedule.Attributes()["connection_ids"].IsUnknown() { + scheduleAttrValues["connection_ids"] = d.Schedule.Attributes()["connection_ids"] + } else { + scheduleAttrValues["connection_ids"] = types.SetNull(types.StringType) + } } if resp.Data.TransformationSchedule.DaysOfWeek != nil { @@ -125,7 +146,11 @@ func (d *Transformation) ReadFromResponse(ctx context.Context, resp sdk.Transfor scheduleAttrValues["days_of_week"] = types.SetNull(types.StringType) } } else { - scheduleAttrValues["days_of_week"] = types.SetNull(types.StringType) + if !d.Schedule.Attributes()["days_of_week"].IsUnknown() { + scheduleAttrValues["days_of_week"] = d.Schedule.Attributes()["days_of_week"] + } else { + scheduleAttrValues["days_of_week"] = types.SetNull(types.StringType) + } } d.Schedule = types.ObjectValueMust(scheduleAttrs, scheduleAttrValues) @@ -193,6 +218,5 @@ func (d *Transformation) ReadFromResponse(ctx context.Context, resp sdk.Transfor configAttrValues["steps"] = types.ListNull(stepSetAttrType) } - d.Config = types.ObjectValueMust(configAttrs, configAttrValues) } diff --git a/fivetran/framework/resources/transformation.go b/fivetran/framework/resources/transformation.go index 48f20503..55595812 100644 --- a/fivetran/framework/resources/transformation.go +++ b/fivetran/framework/resources/transformation.go @@ -309,15 +309,16 @@ func (r *transformation) Update(ctx context.Context, req resource.UpdateRequest, svc := r.GetClient().NewTransformationUpdate().TransformationId(state.Id.ValueString()) + hasChanges := false pausedPlan := core.GetBoolOrDefault(plan.Paused, true) pausedState := core.GetBoolOrDefault(state.Paused, true) if pausedPlan != pausedState { svc.Paused(pausedPlan) + hasChanges = true } if !plan.Config.IsNull() && !plan.Config.IsUnknown() && !plan.Config.Equal(state.Config) { - hasChanges := false config := fivetran.NewTransformationConfig() configPlanAttributes := plan.Config.Attributes() configStateAttributes := state.Config.Attributes() @@ -388,7 +389,6 @@ func (r *transformation) Update(ctx context.Context, req resource.UpdateRequest, } if !plan.Schedule.IsNull() && !plan.Schedule.IsUnknown() && !plan.Schedule.Equal(state.Schedule) { - hasChanges := false schedule := 
fivetran.NewTransformationSchedule() schedulePlanAttributes := plan.Schedule.Attributes() scheduleStateAttributes := state.Schedule.Attributes() @@ -397,7 +397,7 @@ func (r *transformation) Update(ctx context.Context, req resource.UpdateRequest, !schedulePlanAttributes["time_of_day"].IsUnknown() && !scheduleStateAttributes["time_of_day"].(basetypes.StringValue).Equal(schedulePlanAttributes["time_of_day"].(basetypes.StringValue)) { hasChanges = true - schedule.TimeOfDay(schedulePlanAttributes["time_of_day"].(basetypes.StringValue).ValueString()) + schedule.TimeOfDay(schedulePlanAttributes["time_of_day"].(basetypes.StringValue).ValueString()) } if !schedulePlanAttributes["schedule_type"].IsNull() && @@ -467,17 +467,19 @@ func (r *transformation) Update(ctx context.Context, req resource.UpdateRequest, } } - updateResponse, err := svc.Do(ctx) + if hasChanges { + updateResponse, err := svc.Do(ctx) - if err != nil { - resp.Diagnostics.AddError( - "Unable to Update Transformation Resource.", - fmt.Sprintf("%v; code: %v; message: %v", err, updateResponse.Code, updateResponse.Message), - ) - return - } + if err != nil { + resp.Diagnostics.AddError( + "Unable to Update Transformation Resource.", + fmt.Sprintf("%v; code: %v; message: %v", err, updateResponse.Code, updateResponse.Message), + ) + return + } - plan.ReadFromResponse(ctx, updateResponse) + plan.ReadFromResponse(ctx, updateResponse) + } resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) } diff --git a/fivetran/framework/resources/transformation_project.go b/fivetran/framework/resources/transformation_project.go index dc4c91a6..817f165a 100644 --- a/fivetran/framework/resources/transformation_project.go +++ b/fivetran/framework/resources/transformation_project.go @@ -65,13 +65,34 @@ func (r *transformationProject) Create(ctx context.Context, req resource.CreateR if !data.ProjectConfig.IsNull() && !data.ProjectConfig.IsUnknown() { projectConfig := fivetran.NewTransformationProjectConfig() projectConfigAttributes := data.ProjectConfig.Attributes() - projectConfig.DbtVersion(projectConfigAttributes["dbt_version"].(basetypes.StringValue).ValueString()) - projectConfig.DefaultSchema(projectConfigAttributes["default_schema"].(basetypes.StringValue).ValueString()) - projectConfig.GitRemoteUrl(projectConfigAttributes["git_remote_url"].(basetypes.StringValue).ValueString()) - projectConfig.FolderPath(projectConfigAttributes["folder_path"].(basetypes.StringValue).ValueString()) - projectConfig.GitBranch(projectConfigAttributes["git_branch"].(basetypes.StringValue).ValueString()) - projectConfig.TargetName(projectConfigAttributes["target_name"].(basetypes.StringValue).ValueString()) - projectConfig.Threads(int(projectConfigAttributes["threads"].(basetypes.Int64Value).ValueInt64())) + + if !projectConfigAttributes["dbt_version"].IsNull() && !projectConfigAttributes["dbt_version"].IsUnknown() { + projectConfig.DbtVersion(projectConfigAttributes["dbt_version"].(basetypes.StringValue).ValueString()) + } + + if !projectConfigAttributes["default_schema"].IsNull() && !projectConfigAttributes["default_schema"].IsUnknown() { + projectConfig.DefaultSchema(projectConfigAttributes["default_schema"].(basetypes.StringValue).ValueString()) + } + + if !projectConfigAttributes["git_remote_url"].IsNull() && !projectConfigAttributes["git_remote_url"].IsUnknown() { + projectConfig.GitRemoteUrl(projectConfigAttributes["git_remote_url"].(basetypes.StringValue).ValueString()) + } + + if !projectConfigAttributes["folder_path"].IsNull() && 
+	hasChanges := false
 	pausedPlan := core.GetBoolOrDefault(plan.Paused, true)
 	pausedState := core.GetBoolOrDefault(state.Paused, true)
 
 	if pausedPlan != pausedState {
 		svc.Paused(pausedPlan)
+		hasChanges = true
 	}
 
 	if !plan.Config.IsNull() && !plan.Config.IsUnknown() && !plan.Config.Equal(state.Config) {
-		hasChanges := false
 		config := fivetran.NewTransformationConfig()
 		configPlanAttributes := plan.Config.Attributes()
 		configStateAttributes := state.Config.Attributes()
@@ -388,7 +389,6 @@ func (r *transformation) Update(ctx context.Context, req resource.UpdateRequest,
 	}
 
 	if !plan.Schedule.IsNull() && !plan.Schedule.IsUnknown() && !plan.Schedule.Equal(state.Schedule) {
-		hasChanges := false
 		schedule := fivetran.NewTransformationSchedule()
 		schedulePlanAttributes := plan.Schedule.Attributes()
 		scheduleStateAttributes := state.Schedule.Attributes()
@@ -397,7 +397,7 @@ func (r *transformation) Update(ctx context.Context, req resource.UpdateRequest,
 			!schedulePlanAttributes["time_of_day"].IsUnknown() &&
 			!scheduleStateAttributes["time_of_day"].(basetypes.StringValue).Equal(schedulePlanAttributes["time_of_day"].(basetypes.StringValue)) {
 			hasChanges = true
-			schedule.TimeOfDay(schedulePlanAttributes["time_of_day"].(basetypes.StringValue).ValueString())
+			schedule.TimeOfDay(schedulePlanAttributes["time_of_day"].(basetypes.StringValue).ValueString())
 		}
 
 		if !schedulePlanAttributes["schedule_type"].IsNull() &&
@@ -467,17 +467,19 @@ func (r *transformation) Update(ctx context.Context, req resource.UpdateRequest,
 		}
 	}
 
-	updateResponse, err := svc.Do(ctx)
+	if hasChanges {
+		updateResponse, err := svc.Do(ctx)
 
-	if err != nil {
-		resp.Diagnostics.AddError(
-			"Unable to Update Transformation Resource.",
-			fmt.Sprintf("%v; code: %v; message: %v", err, updateResponse.Code, updateResponse.Message),
-		)
-		return
-	}
+		if err != nil {
+			resp.Diagnostics.AddError(
+				"Unable to Update Transformation Resource.",
+				fmt.Sprintf("%v; code: %v; message: %v", err, updateResponse.Code, updateResponse.Message),
+			)
+			return
+		}
 
-	plan.ReadFromResponse(ctx, updateResponse)
+		plan.ReadFromResponse(ctx, updateResponse)
+	}
 
 	resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...)
 }
diff --git a/fivetran/framework/resources/transformation_project.go b/fivetran/framework/resources/transformation_project.go
index dc4c91a6..817f165a 100644
--- a/fivetran/framework/resources/transformation_project.go
+++ b/fivetran/framework/resources/transformation_project.go
@@ -65,13 +65,34 @@ func (r *transformationProject) Create(ctx context.Context, req resource.CreateR
 	if !data.ProjectConfig.IsNull() && !data.ProjectConfig.IsUnknown() {
 		projectConfig := fivetran.NewTransformationProjectConfig()
 		projectConfigAttributes := data.ProjectConfig.Attributes()
-		projectConfig.DbtVersion(projectConfigAttributes["dbt_version"].(basetypes.StringValue).ValueString())
-		projectConfig.DefaultSchema(projectConfigAttributes["default_schema"].(basetypes.StringValue).ValueString())
-		projectConfig.GitRemoteUrl(projectConfigAttributes["git_remote_url"].(basetypes.StringValue).ValueString())
-		projectConfig.FolderPath(projectConfigAttributes["folder_path"].(basetypes.StringValue).ValueString())
-		projectConfig.GitBranch(projectConfigAttributes["git_branch"].(basetypes.StringValue).ValueString())
-		projectConfig.TargetName(projectConfigAttributes["target_name"].(basetypes.StringValue).ValueString())
-		projectConfig.Threads(int(projectConfigAttributes["threads"].(basetypes.Int64Value).ValueInt64()))
+
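+		// Forward each optional project_config attribute only when it is known
+		// and set; the unconditional assertions removed above sent zero values
+		// for attributes the configuration omitted.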
+		if !projectConfigAttributes["dbt_version"].IsNull() && !projectConfigAttributes["dbt_version"].IsUnknown() {
+			projectConfig.DbtVersion(projectConfigAttributes["dbt_version"].(basetypes.StringValue).ValueString())
+		}
+
+		if !projectConfigAttributes["default_schema"].IsNull() && !projectConfigAttributes["default_schema"].IsUnknown() {
+			projectConfig.DefaultSchema(projectConfigAttributes["default_schema"].(basetypes.StringValue).ValueString())
+		}
+
+		if !projectConfigAttributes["git_remote_url"].IsNull() && !projectConfigAttributes["git_remote_url"].IsUnknown() {
+			projectConfig.GitRemoteUrl(projectConfigAttributes["git_remote_url"].(basetypes.StringValue).ValueString())
+		}
+
+		if !projectConfigAttributes["folder_path"].IsNull() && !projectConfigAttributes["folder_path"].IsUnknown() {
+			projectConfig.FolderPath(projectConfigAttributes["folder_path"].(basetypes.StringValue).ValueString())
+		}
+
+		if !projectConfigAttributes["git_branch"].IsNull() && !projectConfigAttributes["git_branch"].IsUnknown() {
+			projectConfig.GitBranch(projectConfigAttributes["git_branch"].(basetypes.StringValue).ValueString())
+		}
+
+		if !projectConfigAttributes["target_name"].IsNull() && !projectConfigAttributes["target_name"].IsUnknown() {
+			projectConfig.TargetName(projectConfigAttributes["target_name"].(basetypes.StringValue).ValueString())
+		}
+
+		if !projectConfigAttributes["threads"].IsNull() && !projectConfigAttributes["threads"].IsUnknown() {
+			projectConfig.Threads(int(projectConfigAttributes["threads"].(basetypes.Int64Value).ValueInt64()))
+		}
 
 		if !projectConfigAttributes["environment_vars"].IsUnknown() && !projectConfigAttributes["environment_vars"].IsNull() {
 			evars := []string{}
@@ -166,22 +187,20 @@ func (r *transformationProject) Update(ctx context.Context, req resource.UpdateR
 
 	svc := r.GetClient().NewTransformationProjectUpdate()
 	svc.ProjectId(state.Id.ValueString())
-
+	hasChanges := false
 	runTestsPlan := core.GetBoolOrDefault(plan.RunTests, true)
 	runTestsState := core.GetBoolOrDefault(state.RunTests, true)
 
 	if runTestsPlan != runTestsState {
+		hasChanges = true
 		svc.RunTests(runTestsPlan)
 	}
 
 	if !plan.ProjectConfig.IsUnknown() && !state.ProjectConfig.Equal(plan.ProjectConfig) {
-		hasChanges := false
 		projectConfig := fivetran.NewTransformationProjectConfig()
 		configPlanAttributes := plan.ProjectConfig.Attributes()
 		configStateAttributes := state.ProjectConfig.Attributes()
 
-		fmt.Printf("configPlanAttributes %v\n", configPlanAttributes)
-		fmt.Printf("configStateAttributes %v\n", configStateAttributes)
 		if !configPlanAttributes["folder_path"].IsNull() && !configPlanAttributes["folder_path"].IsUnknown() &&
 			!configStateAttributes["folder_path"].(basetypes.StringValue).Equal(configPlanAttributes["folder_path"].(basetypes.StringValue)) {
@@ -226,17 +245,19 @@ func (r *transformationProject) Update(ctx context.Context, req resource.UpdateR
 		}
 	}
 
-	projectResponse, err := svc.Do(ctx)
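+	// Only issue the update request when the plan actually changed something;
+	// otherwise keep the existing state as is.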
"target_name1"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.environment_vars.0", "environment_var1"), resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_remote_url", "git_remote_url"), - resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_branch", "git_branch"), + resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.git_branch", "git_branch1"), resource.TestCheckResourceAttr("fivetran_transformation_project.project", "project_config.folder_path", "folder_path"), ), } diff --git a/fivetran/framework/resources/transformation_test.go b/fivetran/framework/resources/transformation_test.go index 55e2e1b4..13f83f3b 100644 --- a/fivetran/framework/resources/transformation_test.go +++ b/fivetran/framework/resources/transformation_test.go @@ -153,7 +153,6 @@ var ( "cron": [ "cron1","cron2" ], - "interval": 601, "smart_syncing": true, "schedule_type": "schedule_type1", "days_of_week": [ diff --git a/templates/resources/transformation.md.tmpl b/templates/resources/transformation.md.tmpl index dab933c6..e6c44808 100644 --- a/templates/resources/transformation.md.tmpl +++ b/templates/resources/transformation.md.tmpl @@ -18,13 +18,8 @@ resource "fivetran_transformation" "transformation" { paused = true schedule { - cron = ["0 */1 * * *"] - interval = 601 - smart_syncing = true - connection_ids = ["connection_id1", "connection_id2"] - schedule_type = "INTEGRATED" - days_of_week = ["MONDAY", "FRIDAY"] - time_of_day = "14:00" + schedule_type = "TIME_OF_DAY" + time_of_day = "11:00" } transformation_config { diff --git a/templates/resources/transformation_project.md.tmpl b/templates/resources/transformation_project.md.tmpl index 996d32f1..cff2011f 100644 --- a/templates/resources/transformation_project.md.tmpl +++ b/templates/resources/transformation_project.md.tmpl @@ -82,9 +82,9 @@ resource "fivetran_transformation_project" "project" { folder_path = "folder_path" dbt_version = "dbt_version" default_schema = "default_schema" - threads = 0 + threads = 1 target_name = "target_name" - environment_vars = ["environment_var"] + environment_vars = ["DBT_VARIABLE=variable_value"] } } ``` From c5ca4adf78ae394e4a69741cac4f8e448b65fbc8 Mon Sep 17 00:00:00 2001 From: Aleksandr Boldyrev Date: Tue, 28 Jan 2025 14:57:52 +0100 Subject: [PATCH 13/13] Update transformation.go --- fivetran/framework/core/model/transformation.go | 1 - 1 file changed, 1 deletion(-) diff --git a/fivetran/framework/core/model/transformation.go b/fivetran/framework/core/model/transformation.go index 9b28ef47..fbccbafd 100644 --- a/fivetran/framework/core/model/transformation.go +++ b/fivetran/framework/core/model/transformation.go @@ -2,7 +2,6 @@ package model import ( "context" - "fmt" sdk "github.com/fivetran/go-fivetran/transformations" "github.com/hashicorp/terraform-plugin-framework/attr"