airbyte.DestinationAwsDatalake
DestinationAwsDatalake Resource
Example Usage
Coming soon! (Official TypeScript, Python, Go, and C# examples are not yet published; see the sketch below and the constructor examples later on this page.)
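Until those examples are published, the following TypeScript sketch mirrors the Java and YAML examples below; property names are taken from the constructor reference later on this page, and all "...my_*..." values are placeholders:

import * as airbyte from "@pulumi/airbyte";

// Sketch of the same destination as the Java/YAML examples in this section.
const myDestinationAwsdatalake = new airbyte.DestinationAwsDatalake("myDestinationAwsdatalake", {
    configuration: {
        awsAccountId: "111111111111",
        bucketName: "...my_bucket_name...",
        bucketPrefix: "...my_bucket_prefix...",
        credentials: {
            iamRole: {
                roleArn: "...my_role_arn...",
            },
        },
        format: {
            jsonLinesNewlineDelimitedJson: {
                compressionCodec: "UNCOMPRESSED",
                formatType: "JSONL",
            },
            parquetColumnarStorage: {
                compressionCodec: "GZIP",
                formatType: "Parquet",
            },
        },
        glueCatalogFloatAsDecimal: true,
        lakeformationDatabaseDefaultTagKey: "pii_level",
        lakeformationDatabaseDefaultTagValues: "private,public",
        lakeformationDatabaseName: "...my_lakeformation_database_name...",
        lakeformationGovernedTables: true,
        partitioning: "DAY",
        region: "ap-southeast-4",
    },
    definitionId: "aa9c2d01-84b7-4474-ba6f-e45dbbc28cdd",
    workspaceId: "3df68150-9956-454d-8144-1645f409cdd1",
});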
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.airbyte.DestinationAwsDatalake;
import com.pulumi.airbyte.DestinationAwsDatalakeArgs;
import com.pulumi.airbyte.inputs.DestinationAwsDatalakeConfigurationArgs;
import com.pulumi.airbyte.inputs.DestinationAwsDatalakeConfigurationCredentialsArgs;
import com.pulumi.airbyte.inputs.DestinationAwsDatalakeConfigurationCredentialsIamRoleArgs;
import com.pulumi.airbyte.inputs.DestinationAwsDatalakeConfigurationFormatArgs;
import com.pulumi.airbyte.inputs.DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJsonArgs;
import com.pulumi.airbyte.inputs.DestinationAwsDatalakeConfigurationFormatParquetColumnarStorageArgs;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var myDestinationAwsdatalake = new DestinationAwsDatalake("myDestinationAwsdatalake", DestinationAwsDatalakeArgs.builder()
            .configuration(DestinationAwsDatalakeConfigurationArgs.builder()
                .awsAccountId("111111111111")
                .bucketName("...my_bucket_name...")
                .bucketPrefix("...my_bucket_prefix...")
                .credentials(DestinationAwsDatalakeConfigurationCredentialsArgs.builder()
                    .iamRole(DestinationAwsDatalakeConfigurationCredentialsIamRoleArgs.builder()
                        .roleArn("...my_role_arn...")
                        .build())
                    .build())
                .format(DestinationAwsDatalakeConfigurationFormatArgs.builder()
                    .jsonLinesNewlineDelimitedJson(DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJsonArgs.builder()
                        .compressionCodec("UNCOMPRESSED")
                        .formatType("JSONL")
                        .build())
                    .parquetColumnarStorage(DestinationAwsDatalakeConfigurationFormatParquetColumnarStorageArgs.builder()
                        .compressionCodec("GZIP")
                        .formatType("Parquet")
                        .build())
                    .build())
                .glueCatalogFloatAsDecimal(true)
                .lakeformationDatabaseDefaultTagKey("pii_level")
                .lakeformationDatabaseDefaultTagValues("private,public")
                .lakeformationDatabaseName("...my_lakeformation_database_name...")
                .lakeformationGovernedTables(true)
                .partitioning("DAY")
                .region("ap-southeast-4")
                .build())
            .definitionId("aa9c2d01-84b7-4474-ba6f-e45dbbc28cdd")
            .workspaceId("3df68150-9956-454d-8144-1645f409cdd1")
            .build());
    }
}
resources:
  myDestinationAwsdatalake:
    type: airbyte:DestinationAwsDatalake
    properties:
      configuration:
        aws_account_id: '111111111111'
        bucket_name: '...my_bucket_name...'
        bucket_prefix: '...my_bucket_prefix...'
        credentials:
          iamRole:
            roleArn: '...my_role_arn...'
        format:
          jsonLinesNewlineDelimitedJson:
            compressionCodec: UNCOMPRESSED
            formatType: JSONL
          parquetColumnarStorage:
            compressionCodec: GZIP
            formatType: Parquet
        glue_catalog_float_as_decimal: true
        lakeformation_database_default_tag_key: pii_level
        lakeformation_database_default_tag_values: private,public
        lakeformation_database_name: '...my_lakeformation_database_name...'
        lakeformation_governed_tables: true
        partitioning: DAY
        region: ap-southeast-4
      definitionId: aa9c2d01-84b7-4474-ba6f-e45dbbc28cdd
      workspaceId: 3df68150-9956-454d-8144-1645f409cdd1
Create DestinationAwsDatalake Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new DestinationAwsDatalake(name: string, args: DestinationAwsDatalakeArgs, opts?: CustomResourceOptions);
@overload
def DestinationAwsDatalake(resource_name: str,
                           args: DestinationAwsDatalakeArgs,
                           opts: Optional[ResourceOptions] = None)
@overload
def DestinationAwsDatalake(resource_name: str,
                           opts: Optional[ResourceOptions] = None,
                           configuration: Optional[DestinationAwsDatalakeConfigurationArgs] = None,
                           workspace_id: Optional[str] = None,
                           definition_id: Optional[str] = None,
                           name: Optional[str] = None)
func NewDestinationAwsDatalake(ctx *Context, name string, args DestinationAwsDatalakeArgs, opts ...ResourceOption) (*DestinationAwsDatalake, error)
public DestinationAwsDatalake(string name, DestinationAwsDatalakeArgs args, CustomResourceOptions? opts = null)
public DestinationAwsDatalake(String name, DestinationAwsDatalakeArgs args)
public DestinationAwsDatalake(String name, DestinationAwsDatalakeArgs args, CustomResourceOptions options)
type: airbyte:DestinationAwsDatalake
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name (resource_name in Python)
- The unique name of the resource.
- args DestinationAwsDatalakeArgs
- The arguments to resource properties.
- opts (options in Java) CustomResourceOptions / ResourceOptions
- Bag of options to control the resource's behavior.
- ctx Context (Go only)
- Context object for the current deployment.
Constructor example
The following reference example uses placeholder values for all input properties.
var destinationAwsDatalakeResource = new Airbyte.DestinationAwsDatalake("destinationAwsDatalakeResource", new()
{
    Configuration = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationArgs
    {
        BucketName = "string",
        Credentials = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationCredentialsArgs
        {
            IamRole = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationCredentialsIamRoleArgs
            {
                RoleArn = "string",
            },
            IamUser = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationCredentialsIamUserArgs
            {
                AwsAccessKeyId = "string",
                AwsSecretAccessKey = "string",
            },
        },
        LakeformationDatabaseName = "string",
        AwsAccountId = "string",
        BucketPrefix = "string",
        Format = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationFormatArgs
        {
            JsonLinesNewlineDelimitedJson = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJsonArgs
            {
                CompressionCodec = "string",
                FormatType = "string",
            },
            ParquetColumnarStorage = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationFormatParquetColumnarStorageArgs
            {
                CompressionCodec = "string",
                FormatType = "string",
            },
        },
        GlueCatalogFloatAsDecimal = false,
        LakeformationDatabaseDefaultTagKey = "string",
        LakeformationDatabaseDefaultTagValues = "string",
        LakeformationGovernedTables = false,
        Partitioning = "string",
        Region = "string",
    },
    WorkspaceId = "string",
    DefinitionId = "string",
    Name = "string",
});
example, err := airbyte.NewDestinationAwsDatalake(ctx, "destinationAwsDatalakeResource", &airbyte.DestinationAwsDatalakeArgs{
	Configuration: &airbyte.DestinationAwsDatalakeConfigurationArgs{
		BucketName: pulumi.String("string"),
		Credentials: &airbyte.DestinationAwsDatalakeConfigurationCredentialsArgs{
			IamRole: &airbyte.DestinationAwsDatalakeConfigurationCredentialsIamRoleArgs{
				RoleArn: pulumi.String("string"),
			},
			IamUser: &airbyte.DestinationAwsDatalakeConfigurationCredentialsIamUserArgs{
				AwsAccessKeyId:     pulumi.String("string"),
				AwsSecretAccessKey: pulumi.String("string"),
			},
		},
		LakeformationDatabaseName: pulumi.String("string"),
		AwsAccountId:              pulumi.String("string"),
		BucketPrefix:              pulumi.String("string"),
		Format: &airbyte.DestinationAwsDatalakeConfigurationFormatArgs{
			JsonLinesNewlineDelimitedJson: &airbyte.DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJsonArgs{
				CompressionCodec: pulumi.String("string"),
				FormatType:       pulumi.String("string"),
			},
			ParquetColumnarStorage: &airbyte.DestinationAwsDatalakeConfigurationFormatParquetColumnarStorageArgs{
				CompressionCodec: pulumi.String("string"),
				FormatType:       pulumi.String("string"),
			},
		},
		GlueCatalogFloatAsDecimal:             pulumi.Bool(false),
		LakeformationDatabaseDefaultTagKey:    pulumi.String("string"),
		LakeformationDatabaseDefaultTagValues: pulumi.String("string"),
		LakeformationGovernedTables:           pulumi.Bool(false),
		Partitioning:                          pulumi.String("string"),
		Region:                                pulumi.String("string"),
	},
	WorkspaceId:  pulumi.String("string"),
	DefinitionId: pulumi.String("string"),
	Name:         pulumi.String("string"),
})
var destinationAwsDatalakeResource = new DestinationAwsDatalake("destinationAwsDatalakeResource", DestinationAwsDatalakeArgs.builder()
    .configuration(DestinationAwsDatalakeConfigurationArgs.builder()
        .bucketName("string")
        .credentials(DestinationAwsDatalakeConfigurationCredentialsArgs.builder()
            .iamRole(DestinationAwsDatalakeConfigurationCredentialsIamRoleArgs.builder()
                .roleArn("string")
                .build())
            .iamUser(DestinationAwsDatalakeConfigurationCredentialsIamUserArgs.builder()
                .awsAccessKeyId("string")
                .awsSecretAccessKey("string")
                .build())
            .build())
        .lakeformationDatabaseName("string")
        .awsAccountId("string")
        .bucketPrefix("string")
        .format(DestinationAwsDatalakeConfigurationFormatArgs.builder()
            .jsonLinesNewlineDelimitedJson(DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJsonArgs.builder()
                .compressionCodec("string")
                .formatType("string")
                .build())
            .parquetColumnarStorage(DestinationAwsDatalakeConfigurationFormatParquetColumnarStorageArgs.builder()
                .compressionCodec("string")
                .formatType("string")
                .build())
            .build())
        .glueCatalogFloatAsDecimal(false)
        .lakeformationDatabaseDefaultTagKey("string")
        .lakeformationDatabaseDefaultTagValues("string")
        .lakeformationGovernedTables(false)
        .partitioning("string")
        .region("string")
        .build())
    .workspaceId("string")
    .definitionId("string")
    .name("string")
    .build());
destination_aws_datalake_resource = airbyte.DestinationAwsDatalake("destinationAwsDatalakeResource",
    configuration={
        "bucket_name": "string",
        "credentials": {
            "iam_role": {
                "role_arn": "string",
            },
            "iam_user": {
                "aws_access_key_id": "string",
                "aws_secret_access_key": "string",
            },
        },
        "lakeformation_database_name": "string",
        "aws_account_id": "string",
        "bucket_prefix": "string",
        "format": {
            "json_lines_newline_delimited_json": {
                "compression_codec": "string",
                "format_type": "string",
            },
            "parquet_columnar_storage": {
                "compression_codec": "string",
                "format_type": "string",
            },
        },
        "glue_catalog_float_as_decimal": False,
        "lakeformation_database_default_tag_key": "string",
        "lakeformation_database_default_tag_values": "string",
        "lakeformation_governed_tables": False,
        "partitioning": "string",
        "region": "string",
    },
    workspace_id="string",
    definition_id="string",
    name="string")
const destinationAwsDatalakeResource = new airbyte.DestinationAwsDatalake("destinationAwsDatalakeResource", {
    configuration: {
        bucketName: "string",
        credentials: {
            iamRole: {
                roleArn: "string",
            },
            iamUser: {
                awsAccessKeyId: "string",
                awsSecretAccessKey: "string",
            },
        },
        lakeformationDatabaseName: "string",
        awsAccountId: "string",
        bucketPrefix: "string",
        format: {
            jsonLinesNewlineDelimitedJson: {
                compressionCodec: "string",
                formatType: "string",
            },
            parquetColumnarStorage: {
                compressionCodec: "string",
                formatType: "string",
            },
        },
        glueCatalogFloatAsDecimal: false,
        lakeformationDatabaseDefaultTagKey: "string",
        lakeformationDatabaseDefaultTagValues: "string",
        lakeformationGovernedTables: false,
        partitioning: "string",
        region: "string",
    },
    workspaceId: "string",
    definitionId: "string",
    name: "string",
});
type: airbyte:DestinationAwsDatalake
properties:
  configuration:
    awsAccountId: string
    bucketName: string
    bucketPrefix: string
    credentials:
      iamRole:
        roleArn: string
      iamUser:
        awsAccessKeyId: string
        awsSecretAccessKey: string
    format:
      jsonLinesNewlineDelimitedJson:
        compressionCodec: string
        formatType: string
      parquetColumnarStorage:
        compressionCodec: string
        formatType: string
    glueCatalogFloatAsDecimal: false
    lakeformationDatabaseDefaultTagKey: string
    lakeformationDatabaseDefaultTagValues: string
    lakeformationDatabaseName: string
    lakeformationGovernedTables: false
    partitioning: string
    region: string
  definitionId: string
  name: string
  workspaceId: string
DestinationAwsDatalake Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The DestinationAwsDatalake resource accepts the following input properties:
- configuration DestinationAwsDatalakeConfiguration
- workspaceId string
- definitionId string
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- name string
- Name of the destination, e.g. dev-mysql-instance.

Each SDK exposes the same properties in its usual naming convention, e.g. workspace_id and definition_id in Python.
Outputs
All input properties are implicitly available as output properties. Additionally, the DestinationAwsDatalake resource produces the following output properties:
- createdAt number
- destinationId string
- destinationType string
- id string
- The provider-assigned unique ID for this managed resource.
- resourceAllocation DestinationAwsDatalakeResourceAllocation
- Actor or actor-definition-specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
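As a brief illustration, a TypeScript sketch (assuming a destination declared elsewhere in the program) that exports some of these outputs:

import * as airbyte from "@pulumi/airbyte";

// Assumes `destination` is a DestinationAwsDatalake declared elsewhere in this program.
declare const destination: airbyte.DestinationAwsDatalake;

// Output properties can be exported like any other Pulumi outputs.
export const destinationId = destination.destinationId;
export const destinationType = destination.destinationType;
export const createdAt = destination.createdAt;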
Look up Existing DestinationAwsDatalake Resource
Get an existing DestinationAwsDatalake resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: DestinationAwsDatalakeState, opts?: CustomResourceOptions): DestinationAwsDatalake
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        configuration: Optional[DestinationAwsDatalakeConfigurationArgs] = None,
        created_at: Optional[float] = None,
        definition_id: Optional[str] = None,
        destination_id: Optional[str] = None,
        destination_type: Optional[str] = None,
        name: Optional[str] = None,
        resource_allocation: Optional[DestinationAwsDatalakeResourceAllocationArgs] = None,
        workspace_id: Optional[str] = None) -> DestinationAwsDatalake
func GetDestinationAwsDatalake(ctx *Context, name string, id IDInput, state *DestinationAwsDatalakeState, opts ...ResourceOption) (*DestinationAwsDatalake, error)
public static DestinationAwsDatalake Get(string name, Input<string> id, DestinationAwsDatalakeState? state, CustomResourceOptions? opts = null)
public static DestinationAwsDatalake get(String name, Output<String> id, DestinationAwsDatalakeState state, CustomResourceOptions options)
resources:
  _:
    type: airbyte:DestinationAwsDatalake
    get:
      id: ${id}
- name (resource_name in Python)
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts (options in Java)
- A bag of options that control this resource's behavior.
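A minimal TypeScript sketch of such a lookup; the destination ID below is a placeholder:

import * as airbyte from "@pulumi/airbyte";

// Look up an existing destination by its provider-assigned ID.
// The UUID below is a placeholder, not a real destination ID.
const existing = airbyte.DestinationAwsDatalake.get(
    "existingAwsDatalake",
    "00000000-0000-0000-0000-000000000000");

// State fields such as the bucket name are then available as outputs.
export const existingBucket = existing.configuration.apply(c => c?.bucketName);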
The following state arguments are supported:
- configuration DestinationAwsDatalakeConfiguration
- createdAt number
- definitionId string
- The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- destinationId string
- destinationType string
- name string
- Name of the destination, e.g. dev-mysql-instance.
- resourceAllocation DestinationAwsDatalakeResourceAllocation
- Actor or actor-definition-specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- workspaceId string
Supporting Types
DestinationAwsDatalakeConfiguration, DestinationAwsDatalakeConfigurationArgs
- bucketName string
- The name of the S3 bucket. Read more here.
- credentials DestinationAwsDatalakeConfigurationCredentials
- Choose how to authenticate to AWS.
- lakeformationDatabaseName string
- The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace.
- awsAccountId string
- Target AWS account ID.
- bucketPrefix string
- S3 prefix.
- format DestinationAwsDatalakeConfigurationFormat
- Format of the data output.
- glueCatalogFloatAsDecimal bool
- Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source. Default: false.
- lakeformationDatabaseDefaultTagKey string
- Add a default tag key to databases created by this destination.
- lakeformationDatabaseDefaultTagValues string
- Add default values for the Tag Key to databases created by this destination. Comma-separate multiple values.
- lakeformationGovernedTables bool
- Whether to create tables as LF governed tables. Default: false.
- partitioning string
- Partition data by cursor fields when a cursor field is a date. Default: "NO PARTITIONING"; must be one of ["NO PARTITIONING", "DATE", "YEAR", "MONTH", "DAY", "YEAR/MONTH", "YEAR/MONTH/DAY"].
- region string
- The region of the S3 bucket. See here for all region codes. Default: ""; must be one of ["", "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", "ap-southeast-4", "ca-central-1", "ca-west-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", "il-central-1", "me-central-1", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", "us-gov-west-1", "us-west-1", "us-west-2"].
DestinationAwsDatalakeConfigurationCredentials, DestinationAwsDatalakeConfigurationCredentialsArgs
- iamRole DestinationAwsDatalakeConfigurationCredentialsIamRole
- iamUser DestinationAwsDatalakeConfigurationCredentialsIamUser
DestinationAwsDatalakeConfigurationCredentialsIamRole, DestinationAwsDatalakeConfigurationCredentialsIamRoleArgs
- roleArn string
- Will assume this role to write data to S3.
DestinationAwsDatalakeConfigurationCredentialsIamUser, DestinationAwsDatalakeConfigurationCredentialsIamUserArgs
- awsAccessKeyId string
- AWS User Access Key ID.
- awsSecretAccessKey string
- Secret Access Key.
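Since the access key pair is sensitive, one option is to read it from Pulumi secret config instead of hard-coding it. A sketch, assuming the config keys awsAccessKeyId and awsSecretAccessKey were set with pulumi config set --secret:

import * as pulumi from "@pulumi/pulumi";

// Sketch: read the IAM user key pair from Pulumi secret config rather than
// hard-coding it. The config key names here are assumptions for this example.
const cfg = new pulumi.Config();

const iamUserCredentials = {
    iamUser: {
        awsAccessKeyId: cfg.requireSecret("awsAccessKeyId"),
        awsSecretAccessKey: cfg.requireSecret("awsSecretAccessKey"),
    },
};
// Pass `iamUserCredentials` as the configuration's `credentials` input.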
DestinationAwsDatalakeConfigurationFormat, DestinationAwsDatalakeConfigurationFormatArgs
- jsonLinesNewlineDelimitedJson DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJson
- parquetColumnarStorage DestinationAwsDatalakeConfigurationFormatParquetColumnarStorage
DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJson, DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJsonArgs
- compressionCodec string
- The compression algorithm used to compress data. Default: "UNCOMPRESSED"; must be one of ["UNCOMPRESSED", "GZIP"].
- formatType string
- Default: "JSONL"; must be "JSONL".
DestinationAwsDatalakeConfigurationFormatParquetColumnarStorage, DestinationAwsDatalakeConfigurationFormatParquetColumnarStorageArgs
- compressionCodec string
- The compression algorithm used to compress data. Default: "SNAPPY"; must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"].
- formatType string
- Default: "Parquet"; must be "Parquet".
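The JSONL and Parquet blocks are alternative output formats. A minimal TypeScript sketch, assuming only Parquet output is wanted (bucket, database, role ARN, and workspace values are placeholders):

import * as airbyte from "@pulumi/airbyte";

// Sketch: configure a single output format (Parquet with ZSTD compression).
// All values below are placeholders.
const parquetDatalake = new airbyte.DestinationAwsDatalake("parquetDatalake", {
    configuration: {
        bucketName: "my-datalake-bucket",
        lakeformationDatabaseName: "analytics",
        credentials: {
            iamRole: {
                roleArn: "arn:aws:iam::111111111111:role/airbyte-writer",
            },
        },
        format: {
            parquetColumnarStorage: {
                compressionCodec: "ZSTD",
                formatType: "Parquet",
            },
        },
    },
    workspaceId: "00000000-0000-0000-0000-000000000000",
});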
DestinationAwsDatalakeResourceAllocation, DestinationAwsDatalakeResourceAllocationArgs
- default DestinationAwsDatalakeResourceAllocationDefault
- Optional resource requirements to run workers (blank for unbounded allocations).
- jobSpecifics List<DestinationAwsDatalakeResourceAllocationJobSpecific>
DestinationAwsDatalakeResourceAllocationDefault, DestinationAwsDatalakeResourceAllocationDefaultArgs
- cpuLimit string
- cpuRequest string
- ephemeralStorageLimit string
- ephemeralStorageRequest string
- memoryLimit string
- memoryRequest string
DestinationAwsDatalakeResourceAllocationJobSpecific, DestinationAwsDatalakeResourceAllocationJobSpecificArgs
- jobType string
- Enum that describes the different types of jobs that the platform runs. Must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"].
- resourceRequirements DestinationAwsDatalakeResourceAllocationJobSpecificResourceRequirements
- Optional resource requirements to run workers (blank for unbounded allocations).
DestinationAwsDatalakeResourceAllocationJobSpecificResourceRequirements, DestinationAwsDatalakeResourceAllocationJobSpecificResourceRequirementsArgs
- cpuLimit string
- cpuRequest string
- ephemeralStorageLimit string
- ephemeralStorageRequest string
- memoryLimit string
- memoryRequest string
Import
$ pulumi import airbyte:index/destinationAwsDatalake:DestinationAwsDatalake my_airbyte_destination_aws_datalake ""
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- airbyte airbytehq/terraform-provider-airbyte
- License
- Notes
- This Pulumi package is based on the airbyte Terraform Provider.