airbyte.SourceSftpBulk
SourceSftpBulk Resource
Example Usage
Coming soon!
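The following Python sketch mirrors the Java and YAML examples below (an illustrative sketch, assuming the provider's Python SDK imports as pulumi_airbyte; all values are the same placeholders used in those examples).

import pulumi_airbyte as airbyte

my_source_sftpbulk = airbyte.SourceSftpBulk("mySourceSftpbulk",
    configuration={
        "credentials": {
            "authenticate_via_password": {
                "password": "...my_password...",
            },
        },
        "delivery_method": {
            "replicate_records": {},
        },
        "folder_path": "/logs/2022",
        "host": "www.host.com",
        "port": 22,
        "start_date": "2021-01-01T00:00:00.000000Z",
        "streams": [{
            "days_to_sync_if_history_is_full": 5,
            "format": {},
            "globs": ["..."],
            "input_schema": "...my_input_schema...",
            "name": "...my_name...",
            "recent_n_files_to_read_for_schema_discovery": 4,
            "schemaless": False,
            "validation_policy": "Skip Record",
        }],
        "username": "...my_username...",
    },
    definition_id="68ea307f-7d93-44b6-91fe-f4681001bb74",
    secret_id="...my_secret_id...",
    workspace_id="ec1a23b7-466d-4987-acf2-eae96eb456d0")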
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.airbyte.SourceSftpBulk;
import com.pulumi.airbyte.SourceSftpBulkArgs;
import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationArgs;
import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationCredentialsArgs;
import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationCredentialsAuthenticateViaPasswordArgs;
import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationDeliveryMethodArgs;
import com.pulumi.airbyte.inputs.SourceSftpBulkConfigurationStreamArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var mySourceSftpbulk = new SourceSftpBulk("mySourceSftpbulk", SourceSftpBulkArgs.builder()
.configuration(SourceSftpBulkConfigurationArgs.builder()
.credentials(SourceSftpBulkConfigurationCredentialsArgs.builder()
.authenticateViaPassword(SourceSftpBulkConfigurationCredentialsAuthenticateViaPasswordArgs.builder()
.password("...my_password...")
.build())
.build())
.deliveryMethod(SourceSftpBulkConfigurationDeliveryMethodArgs.builder()
.replicateRecords()
.build())
.folderPath("/logs/2022")
.host("www.host.com")
.port(22)
.startDate("2021-01-01T00:00:00.000000Z")
.streams(SourceSftpBulkConfigurationStreamArgs.builder()
.daysToSyncIfHistoryIsFull(5)
.format()
.globs("...")
.inputSchema("...my_input_schema...")
.name("...my_name...")
.recentNFilesToReadForSchemaDiscovery(4)
.schemaless(false)
.validationPolicy("Skip Record")
.build())
.username("...my_username...")
.build())
.definitionId("68ea307f-7d93-44b6-91fe-f4681001bb74")
.secretId("...my_secret_id...")
.workspaceId("ec1a23b7-466d-4987-acf2-eae96eb456d0")
.build());
}
}
resources:
mySourceSftpbulk:
type: airbyte:SourceSftpBulk
properties:
configuration:
credentials:
authenticateViaPassword:
password: '...my_password...'
delivery_method:
replicateRecords: {}
folder_path: /logs/2022
host: www.host.com
port: 22
start_date: 2021-01-01T00:00:00.000000Z
streams:
- daysToSyncIfHistoryIsFull: 5
format: {}
globs:
- '...'
inputSchema: '...my_input_schema...'
name: '...my_name...'
recentNFilesToReadForSchemaDiscovery: 4
schemaless: false
validationPolicy: Skip Record
username: '...my_username...'
definitionId: 68ea307f-7d93-44b6-91fe-f4681001bb74
secretId: '...my_secret_id...'
workspaceId: ec1a23b7-466d-4987-acf2-eae96eb456d0
Create SourceSftpBulk Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new SourceSftpBulk(name: string, args: SourceSftpBulkArgs, opts?: CustomResourceOptions);
@overload
def SourceSftpBulk(resource_name: str,
args: SourceSftpBulkArgs,
opts: Optional[ResourceOptions] = None)
@overload
def SourceSftpBulk(resource_name: str,
opts: Optional[ResourceOptions] = None,
configuration: Optional[SourceSftpBulkConfigurationArgs] = None,
workspace_id: Optional[str] = None,
definition_id: Optional[str] = None,
name: Optional[str] = None,
secret_id: Optional[str] = None)
func NewSourceSftpBulk(ctx *Context, name string, args SourceSftpBulkArgs, opts ...ResourceOption) (*SourceSftpBulk, error)
public SourceSftpBulk(string name, SourceSftpBulkArgs args, CustomResourceOptions? opts = null)
public SourceSftpBulk(String name, SourceSftpBulkArgs args)
public SourceSftpBulk(String name, SourceSftpBulkArgs args, CustomResourceOptions options)
type: airbyte:SourceSftpBulk
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args SourceSftpBulkArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args SourceSftpBulkArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args SourceSftpBulkArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args SourceSftpBulkArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args SourceSftpBulkArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var sourceSftpBulkResource = new Airbyte.SourceSftpBulk("sourceSftpBulkResource", new()
{
Configuration = new Airbyte.Inputs.SourceSftpBulkConfigurationArgs
{
Credentials = new Airbyte.Inputs.SourceSftpBulkConfigurationCredentialsArgs
{
AuthenticateViaPassword = new Airbyte.Inputs.SourceSftpBulkConfigurationCredentialsAuthenticateViaPasswordArgs
{
Password = "string",
},
AuthenticateViaPrivateKey = new Airbyte.Inputs.SourceSftpBulkConfigurationCredentialsAuthenticateViaPrivateKeyArgs
{
PrivateKey = "string",
},
},
Host = "string",
Streams = new[]
{
new Airbyte.Inputs.SourceSftpBulkConfigurationStreamArgs
{
Format = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatArgs
{
AvroFormat = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatAvroFormatArgs
{
DoubleAsString = false,
},
CsvFormat = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatCsvFormatArgs
{
Delimiter = "string",
DoubleQuote = false,
Encoding = "string",
EscapeChar = "string",
FalseValues = new[]
{
"string",
},
HeaderDefinition = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionArgs
{
Autogenerated = null,
FromCsv = null,
UserProvided = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs
{
ColumnNames = new[]
{
"string",
},
},
},
IgnoreErrorsOnFieldsMismatch = false,
NullValues = new[]
{
"string",
},
QuoteChar = "string",
SkipRowsAfterHeader = 0,
SkipRowsBeforeHeader = 0,
StringsCanBeNull = false,
TrueValues = new[]
{
"string",
},
},
ExcelFormat = null,
JsonlFormat = null,
ParquetFormat = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatParquetFormatArgs
{
DecimalAsFloat = false,
},
UnstructuredDocumentFormat = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatArgs
{
Processing = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs
{
Local = null,
ViaApi = new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiArgs
{
ApiKey = "string",
ApiUrl = "string",
Parameters = new[]
{
new Airbyte.Inputs.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameterArgs
{
Name = "string",
Value = "string",
},
},
},
},
SkipUnprocessableFiles = false,
Strategy = "string",
},
},
Name = "string",
DaysToSyncIfHistoryIsFull = 0,
Globs = new[]
{
"string",
},
InputSchema = "string",
RecentNFilesToReadForSchemaDiscovery = 0,
Schemaless = false,
ValidationPolicy = "string",
},
},
Username = "string",
DeliveryMethod = new Airbyte.Inputs.SourceSftpBulkConfigurationDeliveryMethodArgs
{
CopyRawFiles = new Airbyte.Inputs.SourceSftpBulkConfigurationDeliveryMethodCopyRawFilesArgs
{
PreserveDirectoryStructure = false,
},
ReplicateRecords = null,
},
FolderPath = "string",
Port = 0,
StartDate = "string",
},
WorkspaceId = "string",
DefinitionId = "string",
Name = "string",
SecretId = "string",
});
example, err := airbyte.NewSourceSftpBulk(ctx, "sourceSftpBulkResource", &airbyte.SourceSftpBulkArgs{
Configuration: &airbyte.SourceSftpBulkConfigurationArgs{
Credentials: &airbyte.SourceSftpBulkConfigurationCredentialsArgs{
AuthenticateViaPassword: &airbyte.SourceSftpBulkConfigurationCredentialsAuthenticateViaPasswordArgs{
Password: pulumi.String("string"),
},
AuthenticateViaPrivateKey: &airbyte.SourceSftpBulkConfigurationCredentialsAuthenticateViaPrivateKeyArgs{
PrivateKey: pulumi.String("string"),
},
},
Host: pulumi.String("string"),
Streams: airbyte.SourceSftpBulkConfigurationStreamArray{
&airbyte.SourceSftpBulkConfigurationStreamArgs{
Format: &airbyte.SourceSftpBulkConfigurationStreamFormatArgs{
AvroFormat: &airbyte.SourceSftpBulkConfigurationStreamFormatAvroFormatArgs{
DoubleAsString: pulumi.Bool(false),
},
CsvFormat: &airbyte.SourceSftpBulkConfigurationStreamFormatCsvFormatArgs{
Delimiter: pulumi.String("string"),
DoubleQuote: pulumi.Bool(false),
Encoding: pulumi.String("string"),
EscapeChar: pulumi.String("string"),
FalseValues: pulumi.StringArray{
pulumi.String("string"),
},
HeaderDefinition: &airbyte.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionArgs{
Autogenerated: &airbyte.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionAutogeneratedArgs{
},
FromCsv: &airbyte.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionFromCsvArgs{
},
UserProvided: &airbyte.SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs{
ColumnNames: pulumi.StringArray{
pulumi.String("string"),
},
},
},
IgnoreErrorsOnFieldsMismatch: pulumi.Bool(false),
NullValues: pulumi.StringArray{
pulumi.String("string"),
},
QuoteChar: pulumi.String("string"),
SkipRowsAfterHeader: pulumi.Float64(0),
SkipRowsBeforeHeader: pulumi.Float64(0),
StringsCanBeNull: pulumi.Bool(false),
TrueValues: pulumi.StringArray{
pulumi.String("string"),
},
},
ExcelFormat: &airbyte.SourceSftpBulkConfigurationStreamFormatExcelFormatArgs{
},
JsonlFormat: &airbyte.SourceSftpBulkConfigurationStreamFormatJsonlFormatArgs{
},
ParquetFormat: &airbyte.SourceSftpBulkConfigurationStreamFormatParquetFormatArgs{
DecimalAsFloat: pulumi.Bool(false),
},
UnstructuredDocumentFormat: &airbyte.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatArgs{
Processing: &airbyte.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs{
Local: &airbyte.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocalArgs{
},
ViaApi: &airbyte.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiArgs{
ApiKey: pulumi.String("string"),
ApiUrl: pulumi.String("string"),
Parameters: airbyte.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameterArray{
&airbyte.SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameterArgs{
Name: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
},
},
SkipUnprocessableFiles: pulumi.Bool(false),
Strategy: pulumi.String("string"),
},
},
Name: pulumi.String("string"),
DaysToSyncIfHistoryIsFull: pulumi.Float64(0),
Globs: pulumi.StringArray{
pulumi.String("string"),
},
InputSchema: pulumi.String("string"),
RecentNFilesToReadForSchemaDiscovery: pulumi.Float64(0),
Schemaless: pulumi.Bool(false),
ValidationPolicy: pulumi.String("string"),
},
},
Username: pulumi.String("string"),
DeliveryMethod: &airbyte.SourceSftpBulkConfigurationDeliveryMethodArgs{
CopyRawFiles: &airbyte.SourceSftpBulkConfigurationDeliveryMethodCopyRawFilesArgs{
PreserveDirectoryStructure: pulumi.Bool(false),
},
ReplicateRecords: &airbyte.SourceSftpBulkConfigurationDeliveryMethodReplicateRecordsArgs{
},
},
FolderPath: pulumi.String("string"),
Port: pulumi.Float64(0),
StartDate: pulumi.String("string"),
},
WorkspaceId: pulumi.String("string"),
DefinitionId: pulumi.String("string"),
Name: pulumi.String("string"),
SecretId: pulumi.String("string"),
})
var sourceSftpBulkResource = new SourceSftpBulk("sourceSftpBulkResource", SourceSftpBulkArgs.builder()
.configuration(SourceSftpBulkConfigurationArgs.builder()
.credentials(SourceSftpBulkConfigurationCredentialsArgs.builder()
.authenticateViaPassword(SourceSftpBulkConfigurationCredentialsAuthenticateViaPasswordArgs.builder()
.password("string")
.build())
.authenticateViaPrivateKey(SourceSftpBulkConfigurationCredentialsAuthenticateViaPrivateKeyArgs.builder()
.privateKey("string")
.build())
.build())
.host("string")
.streams(SourceSftpBulkConfigurationStreamArgs.builder()
.format(SourceSftpBulkConfigurationStreamFormatArgs.builder()
.avroFormat(SourceSftpBulkConfigurationStreamFormatAvroFormatArgs.builder()
.doubleAsString(false)
.build())
.csvFormat(SourceSftpBulkConfigurationStreamFormatCsvFormatArgs.builder()
.delimiter("string")
.doubleQuote(false)
.encoding("string")
.escapeChar("string")
.falseValues("string")
.headerDefinition(SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionArgs.builder()
.autogenerated()
.fromCsv()
.userProvided(SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs.builder()
.columnNames("string")
.build())
.build())
.ignoreErrorsOnFieldsMismatch(false)
.nullValues("string")
.quoteChar("string")
.skipRowsAfterHeader(0)
.skipRowsBeforeHeader(0)
.stringsCanBeNull(false)
.trueValues("string")
.build())
.excelFormat()
.jsonlFormat()
.parquetFormat(SourceSftpBulkConfigurationStreamFormatParquetFormatArgs.builder()
.decimalAsFloat(false)
.build())
.unstructuredDocumentFormat(SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatArgs.builder()
.processing(SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs.builder()
.local()
.viaApi(SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiArgs.builder()
.apiKey("string")
.apiUrl("string")
.parameters(SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameterArgs.builder()
.name("string")
.value("string")
.build())
.build())
.build())
.skipUnprocessableFiles(false)
.strategy("string")
.build())
.build())
.name("string")
.daysToSyncIfHistoryIsFull(0)
.globs("string")
.inputSchema("string")
.recentNFilesToReadForSchemaDiscovery(0)
.schemaless(false)
.validationPolicy("string")
.build())
.username("string")
.deliveryMethod(SourceSftpBulkConfigurationDeliveryMethodArgs.builder()
.copyRawFiles(SourceSftpBulkConfigurationDeliveryMethodCopyRawFilesArgs.builder()
.preserveDirectoryStructure(false)
.build())
.replicateRecords()
.build())
.folderPath("string")
.port(0)
.startDate("string")
.build())
.workspaceId("string")
.definitionId("string")
.name("string")
.secretId("string")
.build());
source_sftp_bulk_resource = airbyte.SourceSftpBulk("sourceSftpBulkResource",
configuration={
"credentials": {
"authenticate_via_password": {
"password": "string",
},
"authenticate_via_private_key": {
"private_key": "string",
},
},
"host": "string",
"streams": [{
"format": {
"avro_format": {
"double_as_string": False,
},
"csv_format": {
"delimiter": "string",
"double_quote": False,
"encoding": "string",
"escape_char": "string",
"false_values": ["string"],
"header_definition": {
"autogenerated": {},
"from_csv": {},
"user_provided": {
"column_names": ["string"],
},
},
"ignore_errors_on_fields_mismatch": False,
"null_values": ["string"],
"quote_char": "string",
"skip_rows_after_header": 0,
"skip_rows_before_header": 0,
"strings_can_be_null": False,
"true_values": ["string"],
},
"excel_format": {},
"jsonl_format": {},
"parquet_format": {
"decimal_as_float": False,
},
"unstructured_document_format": {
"processing": {
"local": {},
"via_api": {
"api_key": "string",
"api_url": "string",
"parameters": [{
"name": "string",
"value": "string",
}],
},
},
"skip_unprocessable_files": False,
"strategy": "string",
},
},
"name": "string",
"days_to_sync_if_history_is_full": 0,
"globs": ["string"],
"input_schema": "string",
"recent_n_files_to_read_for_schema_discovery": 0,
"schemaless": False,
"validation_policy": "string",
}],
"username": "string",
"delivery_method": {
"copy_raw_files": {
"preserve_directory_structure": False,
},
"replicate_records": {},
},
"folder_path": "string",
"port": 0,
"start_date": "string",
},
workspace_id="string",
definition_id="string",
name="string",
secret_id="string")
const sourceSftpBulkResource = new airbyte.SourceSftpBulk("sourceSftpBulkResource", {
configuration: {
credentials: {
authenticateViaPassword: {
password: "string",
},
authenticateViaPrivateKey: {
privateKey: "string",
},
},
host: "string",
streams: [{
format: {
avroFormat: {
doubleAsString: false,
},
csvFormat: {
delimiter: "string",
doubleQuote: false,
encoding: "string",
escapeChar: "string",
falseValues: ["string"],
headerDefinition: {
autogenerated: {},
fromCsv: {},
userProvided: {
columnNames: ["string"],
},
},
ignoreErrorsOnFieldsMismatch: false,
nullValues: ["string"],
quoteChar: "string",
skipRowsAfterHeader: 0,
skipRowsBeforeHeader: 0,
stringsCanBeNull: false,
trueValues: ["string"],
},
excelFormat: {},
jsonlFormat: {},
parquetFormat: {
decimalAsFloat: false,
},
unstructuredDocumentFormat: {
processing: {
local: {},
viaApi: {
apiKey: "string",
apiUrl: "string",
parameters: [{
name: "string",
value: "string",
}],
},
},
skipUnprocessableFiles: false,
strategy: "string",
},
},
name: "string",
daysToSyncIfHistoryIsFull: 0,
globs: ["string"],
inputSchema: "string",
recentNFilesToReadForSchemaDiscovery: 0,
schemaless: false,
validationPolicy: "string",
}],
username: "string",
deliveryMethod: {
copyRawFiles: {
preserveDirectoryStructure: false,
},
replicateRecords: {},
},
folderPath: "string",
port: 0,
startDate: "string",
},
workspaceId: "string",
definitionId: "string",
name: "string",
secretId: "string",
});
type: airbyte:SourceSftpBulk
properties:
configuration:
credentials:
authenticateViaPassword:
password: string
authenticateViaPrivateKey:
privateKey: string
deliveryMethod:
copyRawFiles:
preserveDirectoryStructure: false
replicateRecords: {}
folderPath: string
host: string
port: 0
startDate: string
streams:
- daysToSyncIfHistoryIsFull: 0
format:
avroFormat:
doubleAsString: false
csvFormat:
delimiter: string
doubleQuote: false
encoding: string
escapeChar: string
falseValues:
- string
headerDefinition:
autogenerated: {}
fromCsv: {}
userProvided:
columnNames:
- string
ignoreErrorsOnFieldsMismatch: false
nullValues:
- string
quoteChar: string
skipRowsAfterHeader: 0
skipRowsBeforeHeader: 0
stringsCanBeNull: false
trueValues:
- string
excelFormat: {}
jsonlFormat: {}
parquetFormat:
decimalAsFloat: false
unstructuredDocumentFormat:
processing:
local: {}
viaApi:
apiKey: string
apiUrl: string
parameters:
- name: string
value: string
skipUnprocessableFiles: false
strategy: string
globs:
- string
inputSchema: string
name: string
recentNFilesToReadForSchemaDiscovery: 0
schemaless: false
validationPolicy: string
username: string
definitionId: string
name: string
secretId: string
workspaceId: string
SourceSftpBulk Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
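For example, the nested configuration could be supplied either way (a minimal illustrative fragment, assuming the Python SDK imports as pulumi_airbyte and exposes the generated args classes at the package root):

import pulumi_airbyte as airbyte

# Typed args class (only a fragment of the full configuration; values are placeholders):
config_args = airbyte.SourceSftpBulkConfigurationArgs(
    host="www.host.com",
    username="...my_username...",
    folder_path="/logs/2022",
    streams=[],
)

# Equivalent dictionary literal using the same snake_case keys:
config_dict = {
    "host": "www.host.com",
    "username": "...my_username...",
    "folder_path": "/logs/2022",
    "streams": [],
}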
The SourceSftpBulk resource accepts the following input properties:
- Configuration
Source
Sftp Bulk Configuration - Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
- Workspace
Id string - Definition
Id string - The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- Name string
- Name of the source e.g. dev-mysql-instance.
- Secret
Id string - Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- Configuration
Source
Sftp Bulk Configuration Args - Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
- Workspace
Id string - Definition
Id string - The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- Name string
- Name of the source e.g. dev-mysql-instance.
- Secret
Id string - Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- configuration
Source
Sftp Bulk Configuration - Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
- workspace
Id String - definition
Id String - The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name String
- Name of the source e.g. dev-mysql-instance.
- secret
Id String - Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- configuration
Source
Sftp Bulk Configuration - Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
- workspace
Id string - definition
Id string - The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name string
- Name of the source e.g. dev-mysql-instance.
- secret
Id string - Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- configuration
Source
Sftp Bulk Configuration Args - Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
- workspace_
id str - definition_
id str - The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name str
- Name of the source e.g. dev-mysql-instance.
- secret_
id str - Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- configuration Property Map
- Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
- workspace
Id String - definition
Id String - The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name String
- Name of the source e.g. dev-mysql-instance.
- secret
Id String - Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
Outputs
All input properties are implicitly available as output properties. Additionally, the SourceSftpBulk resource produces the following output properties:
- Created
At double - Id string
- The provider-assigned unique ID for this managed resource.
- Resource
Allocation SourceSftp Bulk Resource Allocation - Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- Source
Id string - Source
Type string
- Created
At float64 - Id string
- The provider-assigned unique ID for this managed resource.
- Resource
Allocation SourceSftp Bulk Resource Allocation - Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- Source
Id string - Source
Type string
- created
At Double - id String
- The provider-assigned unique ID for this managed resource.
- resource
Allocation SourceSftp Bulk Resource Allocation - Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- source
Id String - source
Type String
- created
At number - id string
- The provider-assigned unique ID for this managed resource.
- resource
Allocation SourceSftp Bulk Resource Allocation - Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- source
Id string - source
Type string
- created_
at float - id str
- The provider-assigned unique ID for this managed resource.
- resource_
allocation SourceSftp Bulk Resource Allocation - Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- source_
id str - source_
type str
- created
At Number - id String
- The provider-assigned unique ID for this managed resource.
- resource
Allocation Property Map - Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- source
Id String - source
Type String
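For instance, assuming the my_source_sftpbulk resource declared in the Example Usage section above, the provider-assigned outputs can be exported like any other Pulumi output (a minimal sketch):

import pulumi

pulumi.export("sourceId", my_source_sftpbulk.source_id)
pulumi.export("sourceType", my_source_sftpbulk.source_type)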
Look up Existing SourceSftpBulk Resource
Get an existing SourceSftpBulk resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: SourceSftpBulkState, opts?: CustomResourceOptions): SourceSftpBulk
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
configuration: Optional[SourceSftpBulkConfigurationArgs] = None,
created_at: Optional[float] = None,
definition_id: Optional[str] = None,
name: Optional[str] = None,
resource_allocation: Optional[SourceSftpBulkResourceAllocationArgs] = None,
secret_id: Optional[str] = None,
source_id: Optional[str] = None,
source_type: Optional[str] = None,
workspace_id: Optional[str] = None) -> SourceSftpBulk
func GetSourceSftpBulk(ctx *Context, name string, id IDInput, state *SourceSftpBulkState, opts ...ResourceOption) (*SourceSftpBulk, error)
public static SourceSftpBulk Get(string name, Input<string> id, SourceSftpBulkState? state, CustomResourceOptions? opts = null)
public static SourceSftpBulk get(String name, Output<String> id, SourceSftpBulkState state, CustomResourceOptions options)
resources:
  _:
    type: airbyte:SourceSftpBulk
    get:
      id: ${id}
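For example, in Python an existing source could be brought into a program by ID (a minimal sketch; the ID value is a placeholder, not a real source ID):

import pulumi_airbyte as airbyte

existing = airbyte.SourceSftpBulk.get("existing-sftp-bulk", id="<source-id>")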
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Configuration
Source
Sftp Bulk Configuration - Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
- Created
At double - Definition
Id string - The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- Name string
- Name of the source e.g. dev-mysql-instance.
- Resource
Allocation SourceSftp Bulk Resource Allocation - Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- Secret
Id string - Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- Source
Id string - Source
Type string - Workspace
Id string
- Configuration
Source
Sftp Bulk Configuration Args - Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
- Created
At float64 - Definition
Id string - The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- Name string
- Name of the source e.g. dev-mysql-instance.
- Resource
Allocation SourceSftp Bulk Resource Allocation Args - Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- Secret
Id string - Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- Source
Id string - Source
Type string - Workspace
Id string
- configuration
Source
Sftp Bulk Configuration - Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
- created
At Double - definition
Id String - The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name String
- Name of the source e.g. dev-mysql-instance.
- resource
Allocation SourceSftp Bulk Resource Allocation - Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- secret
Id String - Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- source
Id String - source
Type String - workspace
Id String
- configuration
Source
Sftp Bulk Configuration - Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
- created
At number - definition
Id string - The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name string
- Name of the source e.g. dev-mysql-instance.
- resource
Allocation SourceSftp Bulk Resource Allocation - Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- secret
Id string - Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- source
Id string - source
Type string - workspace
Id string
- configuration
Source
Sftp Bulk Configuration Args - Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
- created_
at float - definition_
id str - The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name str
- Name of the source e.g. dev-mysql-instance.
- resource_
allocation SourceSftp Bulk Resource Allocation Args - Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- secret_
id str - Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- source_
id str - source_
type str - workspace_
id str
- configuration Property Map
- Used during spec; allows the developer to configure the cloud provider specific options that are needed when users configure a file-based source.
- created
At Number - definition
Id String - The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name String
- Name of the source e.g. dev-mysql-instance.
- resource
Allocation Property Map - Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- secret
Id String - Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- source
Id String - source
Type String - workspace
Id String
Supporting Types
SourceSftpBulkConfiguration, SourceSftpBulkConfigurationArgs
- Credentials
Source
Sftp Bulk Configuration Credentials - Credentials for connecting to the SFTP Server
- Host string
- The server host address
- Streams
List<Source
Sftp Bulk Configuration Stream> - Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
- Username string
- The server user
- Delivery
Method SourceSftp Bulk Configuration Delivery Method - Folder
Path string - The directory to search files for sync. Default: "/"
- Port double
- The server port. Default: 22
- Start
Date string - UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
- Credentials
Source
Sftp Bulk Configuration Credentials - Credentials for connecting to the SFTP Server
- Host string
- The server host address
- Streams
[]Source
Sftp Bulk Configuration Stream - Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
- Username string
- The server user
- Delivery
Method SourceSftp Bulk Configuration Delivery Method - Folder
Path string - The directory to search files for sync. Default: "/"
- Port float64
- The server port. Default: 22
- Start
Date string - UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
- credentials
Source
Sftp Bulk Configuration Credentials - Credentials for connecting to the SFTP Server
- host String
- The server host address
- streams
List<Source
Sftp Bulk Configuration Stream> - Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
- username String
- The server user
- delivery
Method SourceSftp Bulk Configuration Delivery Method - folder
Path String - The directory to search files for sync. Default: "/"
- port Double
- The server port. Default: 22
- start
Date String - UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
- credentials
Source
Sftp Bulk Configuration Credentials - Credentials for connecting to the SFTP Server
- host string
- The server host address
- streams
Source
Sftp Bulk Configuration Stream[] - Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
- username string
- The server user
- delivery
Method SourceSftp Bulk Configuration Delivery Method - folder
Path string - The directory to search files for sync. Default: "/"
- port number
- The server port. Default: 22
- start
Date string - UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
- credentials
Source
Sftp Bulk Configuration Credentials - Credentials for connecting to the SFTP Server
- host str
- The server host address
- streams
Sequence[Source
Sftp Bulk Configuration Stream] - Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
- username str
- The server user
- delivery_
method SourceSftp Bulk Configuration Delivery Method - folder_
path str - The directory to search files for sync. Default: "/"
- port float
- The server port. Default: 22
- start_
date str - UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
- credentials Property Map
- Credentials for connecting to the SFTP Server
- host String
- The server host address
- streams List<Property Map>
- Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to a warehouse destination such as Snowflake or BigQuery, each stream is a separate table.
- username String
- The server user
- delivery
Method Property Map - folder
Path String - The directory to search files for sync. Default: "/"
- port Number
- The server port. Default: 22
- start
Date String - UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.
SourceSftpBulkConfigurationCredentials, SourceSftpBulkConfigurationCredentialsArgs
SourceSftpBulkConfigurationCredentialsAuthenticateViaPassword, SourceSftpBulkConfigurationCredentialsAuthenticateViaPasswordArgs
- Password string
- Password
- Password string
- Password
- password String
- Password
- password string
- Password
- password str
- Password
- password String
- Password
SourceSftpBulkConfigurationCredentialsAuthenticateViaPrivateKey, SourceSftpBulkConfigurationCredentialsAuthenticateViaPrivateKeyArgs
- Private
Key string - The Private key
- Private
Key string - The Private key
- private
Key String - The Private key
- private
Key string - The Private key
- private_
key str - The Private key
- private
Key String - The Private key
SourceSftpBulkConfigurationDeliveryMethod, SourceSftpBulkConfigurationDeliveryMethodArgs
- Copy
Raw SourceFiles Sftp Bulk Configuration Delivery Method Copy Raw Files - Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
- Replicate
Records SourceSftp Bulk Configuration Delivery Method Replicate Records - Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
- Copy
Raw SourceFiles Sftp Bulk Configuration Delivery Method Copy Raw Files - Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
- Replicate
Records SourceSftp Bulk Configuration Delivery Method Replicate Records - Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
- copy
Raw SourceFiles Sftp Bulk Configuration Delivery Method Copy Raw Files - Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
- replicate
Records SourceSftp Bulk Configuration Delivery Method Replicate Records - Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
- copy
Raw SourceFiles Sftp Bulk Configuration Delivery Method Copy Raw Files - Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
- replicate
Records SourceSftp Bulk Configuration Delivery Method Replicate Records - Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
- copy_
raw_ Sourcefiles Sftp Bulk Configuration Delivery Method Copy Raw Files - Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
- replicate_
records SourceSftp Bulk Configuration Delivery Method Replicate Records - Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
- copy
Raw Property MapFiles - Copy raw files without parsing their contents. Bits are copied into the destination exactly as they appeared in the source. Recommended for use with unstructured text data, non-text and compressed files.
- replicate
Records Property Map - Recommended - Extract and load structured records into your destination of choice. This is the classic method of moving data in Airbyte. It allows for blocking and hashing individual fields or files from a structured schema. Data can be flattened, typed and deduped depending on the destination.
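As an illustration, the two delivery options above map to the following Python fragments (a hedged sketch; the options appear to be alternatives, and which one applies depends on whether records should be parsed or files copied verbatim):

# Recommended: extract and load structured records.
delivery_replicate_records = {
    "replicate_records": {},
}

# Alternative: copy raw files without parsing, optionally keeping the
# source directory structure in the destination.
delivery_copy_raw_files = {
    "copy_raw_files": {
        "preserve_directory_structure": True,
    },
}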
SourceSftpBulkConfigurationDeliveryMethodCopyRawFiles, SourceSftpBulkConfigurationDeliveryMethodCopyRawFilesArgs
- Preserve
Directory boolStructure - If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
- Preserve
Directory boolStructure - If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
- preserve
Directory BooleanStructure - If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
- preserve
Directory booleanStructure - If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
- preserve_
directory_ boolstructure - If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
- preserve
Directory BooleanStructure - If enabled, sends subdirectory folder structure along with source file names to the destination. Otherwise, files will be synced by their names only. This option is ignored when file-based replication is not enabled. Default: true
SourceSftpBulkConfigurationStream, SourceSftpBulkConfigurationStreamArgs
- Format
Source
Sftp Bulk Configuration Stream Format - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
- Name string
- The name of the stream.
- Days
To doubleSync If History Is Full - When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
- Globs List<string>
- The pattern used to specify which files should be selected from the file system. For more information, see the documentation on glob pattern matching.
- Input
Schema string - The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
- Recent
NFiles doubleTo Read For Schema Discovery - The number of recent files which will be used to discover the schema for this stream.
- Schemaless bool
- When enabled, syncs will not validate or structure records against the stream's schema. Default: false
- Validation
Policy string - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
- Format
Source
Sftp Bulk Configuration Stream Format - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
- Name string
- The name of the stream.
- Days
To float64Sync If History Is Full - When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
- Globs []string
- The pattern used to specify which files should be selected from the file system. For more information, see the documentation on glob pattern matching.
- Input
Schema string - The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
- Recent
NFiles float64To Read For Schema Discovery - The number of recent files which will be used to discover the schema for this stream.
- Schemaless bool
- When enabled, syncs will not validate or structure records against the stream's schema. Default: false
- Validation
Policy string - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
- format
Source
Sftp Bulk Configuration Stream Format - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
- name String
- The name of the stream.
- days
To DoubleSync If History Is Full - When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
- globs List<String>
- The pattern used to specify which files should be selected from the file system. For more information, see the documentation on glob pattern matching.
- input
Schema String - The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
- recent
NFiles DoubleTo Read For Schema Discovery - The number of recent files which will be used to discover the schema for this stream.
- schemaless Boolean
- When enabled, syncs will not validate or structure records against the stream's schema. Default: false
- validation
Policy String - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
- format
Source
Sftp Bulk Configuration Stream Format - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
- name string
- The name of the stream.
- days
To numberSync If History Is Full - When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
- globs string[]
- The pattern used to specify which files should be selected from the file system. For more information, see the documentation on glob pattern matching.
- input
Schema string - The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
- recent
NFiles numberTo Read For Schema Discovery - The number of recent files which will be used to discover the schema for this stream.
- schemaless boolean
- When enabled, syncs will not validate or structure records against the stream's schema. Default: false
- validation
Policy string - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
- format
Source
Sftp Bulk Configuration Stream Format - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
- name str
- The name of the stream.
- days_
to_ floatsync_ if_ history_ is_ full - When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
- globs Sequence[str]
- The pattern used to specify which files should be selected from the file system. For more information, see the documentation on glob pattern matching.
- input_
schema str - The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
- recent_
n_ floatfiles_ to_ read_ for_ schema_ discovery - The number of recent files which will be used to discover the schema for this stream.
- schemaless bool
- When enabled, syncs will not validate or structure records against the stream's schema. Default: false
- validation_
policy str - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
- format Property Map
- The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.
- name String
- The name of the stream.
- days
To NumberSync If History Is Full - When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. Default: 3
- globs List<String>
- The pattern used to specify which files should be selected from the file system. For more information, see the documentation on glob pattern matching.
- input
Schema String - The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.
- recent
NFiles NumberTo Read For Schema Discovery - The number of recent files which will be used to discover the schema for this stream.
- schemaless Boolean
- When enabled, syncs will not validate or structure records against the stream's schema. Default: false
- validation
Policy String - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. Default: "Emit Record"; must be one of ["Emit Record", "Skip Record", "Wait for Discover"]
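To illustrate the stream-level options above, a single stream entry might look like the following Python dictionary (a hedged sketch; the name and glob values are placeholders):

log_stream = {
    "name": "app_logs",
    "globs": ["logs/**/*.csv"],
    "format": {
        "csv_format": {
            "delimiter": ",",
        },
    },
    "days_to_sync_if_history_is_full": 3,
    "validation_policy": "Skip Record",
}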
SourceSftpBulkConfigurationStreamFormat, SourceSftpBulkConfigurationStreamFormatArgs
- Avro
Format SourceSftp Bulk Configuration Stream Format Avro Format - Csv
Format SourceSftp Bulk Configuration Stream Format Csv Format - Excel
Format SourceSftp Bulk Configuration Stream Format Excel Format - Jsonl
Format SourceSftp Bulk Configuration Stream Format Jsonl Format - Parquet
Format SourceSftp Bulk Configuration Stream Format Parquet Format - Unstructured
Document SourceFormat Sftp Bulk Configuration Stream Format Unstructured Document Format - Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.
- Avro
Format SourceSftp Bulk Configuration Stream Format Avro Format - Csv
Format SourceSftp Bulk Configuration Stream Format Csv Format - Excel
Format SourceSftp Bulk Configuration Stream Format Excel Format - Jsonl
Format SourceSftp Bulk Configuration Stream Format Jsonl Format - Parquet
Format SourceSftp Bulk Configuration Stream Format Parquet Format - Unstructured
Document SourceFormat Sftp Bulk Configuration Stream Format Unstructured Document Format - Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.
- avro
Format SourceSftp Bulk Configuration Stream Format Avro Format - csv
Format SourceSftp Bulk Configuration Stream Format Csv Format - excel
Format SourceSftp Bulk Configuration Stream Format Excel Format - jsonl
Format SourceSftp Bulk Configuration Stream Format Jsonl Format - parquet
Format SourceSftp Bulk Configuration Stream Format Parquet Format - unstructured
Document SourceFormat Sftp Bulk Configuration Stream Format Unstructured Document Format - Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.
- avro
Format SourceSftp Bulk Configuration Stream Format Avro Format - csv
Format SourceSftp Bulk Configuration Stream Format Csv Format - excel
Format SourceSftp Bulk Configuration Stream Format Excel Format - jsonl
Format SourceSftp Bulk Configuration Stream Format Jsonl Format - parquet
Format SourceSftp Bulk Configuration Stream Format Parquet Format - unstructured
Document SourceFormat Sftp Bulk Configuration Stream Format Unstructured Document Format - Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.
- avro_
format SourceSftp Bulk Configuration Stream Format Avro Format - csv_
format SourceSftp Bulk Configuration Stream Format Csv Format - excel_
format SourceSftp Bulk Configuration Stream Format Excel Format - jsonl_
format SourceSftp Bulk Configuration Stream Format Jsonl Format - parquet_
format SourceSftp Bulk Configuration Stream Format Parquet Format - unstructured_
document_ Sourceformat Sftp Bulk Configuration Stream Format Unstructured Document Format - Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.
- avro
Format Property Map - csv
Format Property Map - excel
Format Property Map - jsonl
Format Property Map - parquet
Format Property Map - unstructured
Document Property MapFormat - Extract text from document formats (.pdf, .docx, .md, .pptx) and emit as one record per file.
SourceSftpBulkConfigurationStreamFormatAvroFormat, SourceSftpBulkConfigurationStreamFormatAvroFormatArgs
- Double
As boolString - Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false
- Double
As boolString - Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false
- double
As BooleanString - Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. Default: false
- double
As booleanString - Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers. Default: false
- double_
as_ boolstring - Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers. Default: false
- double
As BooleanString - Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers. Default: false
SourceSftpBulkConfigurationStreamFormatCsvFormat, SourceSftpBulkConfigurationStreamFormatCsvFormatArgs
- Delimiter string - The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
- DoubleQuote bool - Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
- Encoding string - The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
- EscapeChar string - The character used for escaping special characters. To disallow escaping, leave this field blank.
- FalseValues List<string> - A set of case-sensitive strings that should be interpreted as false values.
- HeaderDefinition SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinition - How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided, and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i}, where i is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV that has headers, they can skip rows.
- IgnoreErrorsOnFieldsMismatch bool - Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
- NullValues List<string> - A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
- QuoteChar string - The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
- SkipRowsAfterHeader double - The number of rows to skip after the header row. Default: 0
- SkipRowsBeforeHeader double - The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
- StringsCanBeNull bool - Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. Default: true
- TrueValues List<string> - A set of case-sensitive strings that should be interpreted as true values.
- Delimiter string - The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
- DoubleQuote bool - Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
- Encoding string - The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
- EscapeChar string - The character used for escaping special characters. To disallow escaping, leave this field blank.
- FalseValues []string - A set of case-sensitive strings that should be interpreted as false values.
- HeaderDefinition SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinition - How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided, and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i}, where i is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV that has headers, they can skip rows.
- IgnoreErrorsOnFieldsMismatch bool - Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
- NullValues []string - A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
- QuoteChar string - The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
- SkipRowsAfterHeader float64 - The number of rows to skip after the header row. Default: 0
- SkipRowsBeforeHeader float64 - The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
- StringsCanBeNull bool - Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. Default: true
- TrueValues []string - A set of case-sensitive strings that should be interpreted as true values.
- delimiter String - The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
- doubleQuote Boolean - Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
- encoding String - The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
- escapeChar String - The character used for escaping special characters. To disallow escaping, leave this field blank.
- falseValues List<String> - A set of case-sensitive strings that should be interpreted as false values.
- headerDefinition SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinition - How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided, and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i}, where i is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV that has headers, they can skip rows.
- ignoreErrorsOnFieldsMismatch Boolean - Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
- nullValues List<String> - A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
- quoteChar String - The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
- skipRowsAfterHeader Double - The number of rows to skip after the header row. Default: 0
- skipRowsBeforeHeader Double - The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
- stringsCanBeNull Boolean - Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. Default: true
- trueValues List<String> - A set of case-sensitive strings that should be interpreted as true values.
- delimiter string - The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
- doubleQuote boolean - Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
- encoding string - The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
- escapeChar string - The character used for escaping special characters. To disallow escaping, leave this field blank.
- falseValues string[] - A set of case-sensitive strings that should be interpreted as false values.
- headerDefinition SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinition - How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided, and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i}, where i is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV that has headers, they can skip rows.
- ignoreErrorsOnFieldsMismatch boolean - Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
- nullValues string[] - A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
- quoteChar string - The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
- skipRowsAfterHeader number - The number of rows to skip after the header row. Default: 0
- skipRowsBeforeHeader number - The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
- stringsCanBeNull boolean - Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. Default: true
- trueValues string[] - A set of case-sensitive strings that should be interpreted as true values.
- delimiter str - The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
- double_quote bool - Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
- encoding str - The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
- escape_char str - The character used for escaping special characters. To disallow escaping, leave this field blank.
- false_values Sequence[str] - A set of case-sensitive strings that should be interpreted as false values.
- header_definition SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinition - How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided, and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i}, where i is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV that has headers, they can skip rows.
- ignore_errors_on_fields_mismatch bool - Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
- null_values Sequence[str] - A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
- quote_char str - The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
- skip_rows_after_header float - The number of rows to skip after the header row. Default: 0
- skip_rows_before_header float - The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
- strings_can_be_null bool - Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. Default: true
- true_values Sequence[str] - A set of case-sensitive strings that should be interpreted as true values.
- delimiter String - The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. Default: ","
- doubleQuote Boolean - Whether two quotes in a quoted CSV value denote a single quote in the data. Default: true
- encoding String - The character encoding of the CSV data. Leave blank to default to UTF8. See the list of Python encodings for allowable options. Default: "utf8"
- escapeChar String - The character used for escaping special characters. To disallow escaping, leave this field blank.
- falseValues List<String> - A set of case-sensitive strings that should be interpreted as false values.
- headerDefinition Property Map - How headers will be defined. User Provided assumes the CSV does not have a header row and uses the headers provided, and Autogenerated assumes the CSV does not have a header row and the CDK will generate headers using f{i}, where i is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV that has headers, they can skip rows.
- ignoreErrorsOnFieldsMismatch Boolean - Whether to ignore errors that occur when the number of fields in the CSV does not match the number of columns in the schema. Default: false
- nullValues List<String> - A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.
- quoteChar String - The character used for quoting CSV values. To disallow quoting, make this field blank. Default: """
- skipRowsAfterHeader Number - The number of rows to skip after the header row. Default: 0
- skipRowsBeforeHeader Number - The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. Default: 0
- stringsCanBeNull Boolean - Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. Default: true
- trueValues List<String> - A set of case-sensitive strings that should be interpreted as true values.
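For example, a stream reading headerless, pipe-delimited CSV files could combine several of the options above. The fragment below is a sketch with placeholder values that sits inside a stream entry; it assumes the user-provided header variant is selected with a userProvided key, mirroring the header definition types that follow:
format:
  csvFormat:
    delimiter: '|'
    encoding: utf8
    skipRowsBeforeHeader: 0
    headerDefinition:
      userProvided:
        columnNames:
          - id
          - created_at
          - amount
    nullValues:
      - NA
    stringsCanBeNull: true
    ignoreErrorsOnFieldsMismatch: false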
SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinition, SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionArgs
SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvided, SourceSftpBulkConfigurationStreamFormatCsvFormatHeaderDefinitionUserProvidedArgs
- ColumnNames List<string> - The column names that will be used while emitting the CSV records
- ColumnNames []string - The column names that will be used while emitting the CSV records
- columnNames List<String> - The column names that will be used while emitting the CSV records
- columnNames string[] - The column names that will be used while emitting the CSV records
- column_names Sequence[str] - The column names that will be used while emitting the CSV records
- columnNames List<String> - The column names that will be used while emitting the CSV records
SourceSftpBulkConfigurationStreamFormatParquetFormat, SourceSftpBulkConfigurationStreamFormatParquetFormatArgs
- DecimalAsFloat bool - Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
- DecimalAsFloat bool - Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
- decimalAsFloat Boolean - Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
- decimalAsFloat boolean - Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
- decimal_as_float bool - Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
- decimalAsFloat Boolean - Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. Default: false
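A minimal sketch of a Parquet stream format that keeps decimals exact instead of converting them to floats (the value shown simply restates the default):
format:
  parquetFormat:
    decimalAsFloat: false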
SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormat, SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatArgs
- Processing SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessing - Processing configuration
- SkipUnprocessableFiles bool - If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
- Strategy string - The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
- Processing SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessing - Processing configuration
- SkipUnprocessableFiles bool - If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
- Strategy string - The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
- processing SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessing - Processing configuration
- skipUnprocessableFiles Boolean - If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
- strategy String - The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
- processing SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessing - Processing configuration
- skipUnprocessableFiles boolean - If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
- strategy string - The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
- processing SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessing - Processing configuration
- skip_unprocessable_files bool - If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
- strategy str - The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
- processing Property Map - Processing configuration
- skipUnprocessableFiles Boolean - If true, skip files that cannot be parsed and pass the error message along as the _ab_source_file_parse_error field. If false, fail the sync. Default: true
- strategy String - The strategy used to parse documents. fast extracts text directly from the document which doesn't work for all files. ocr_only is more reliable, but slower. hi_res is the most reliable, but requires an API key and a hosted instance of unstructured and can't be used with local mode. See the unstructured.io documentation for more details: https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf. Default: "auto"; must be one of ["auto", "fast", "ocr_only", "hi_res"]
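As a sketch, an unstructured document stream that uses the default parsing strategy but fails the sync on unparsable files might look like this inside a stream entry (values are illustrative):
format:
  unstructuredDocumentFormat:
    strategy: auto
    skipUnprocessableFiles: false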
SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessing, SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingArgs
- Local SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal - Process files locally, supporting fast and ocr modes. This is the default option.
- ViaApi SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApi - Process files via an API, using the hi_res mode. This option is useful for increased performance and accuracy, but requires an API key and a hosted instance of unstructured.
- Local SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal - Process files locally, supporting fast and ocr modes. This is the default option.
- ViaApi SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApi - Process files via an API, using the hi_res mode. This option is useful for increased performance and accuracy, but requires an API key and a hosted instance of unstructured.
- local SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal - Process files locally, supporting fast and ocr modes. This is the default option.
- viaApi SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApi - Process files via an API, using the hi_res mode. This option is useful for increased performance and accuracy, but requires an API key and a hosted instance of unstructured.
- local SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal - Process files locally, supporting fast and ocr modes. This is the default option.
- viaApi SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApi - Process files via an API, using the hi_res mode. This option is useful for increased performance and accuracy, but requires an API key and a hosted instance of unstructured.
- local SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingLocal - Process files locally, supporting fast and ocr modes. This is the default option.
- via_api SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApi - Process files via an API, using the hi_res mode. This option is useful for increased performance and accuracy, but requires an API key and a hosted instance of unstructured.
- local Property Map - Process files locally, supporting fast and ocr modes. This is the default option.
- viaApi Property Map - Process files via an API, using the hi_res mode. This option is useful for increased performance and accuracy, but requires an API key and a hosted instance of unstructured.
SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApi, SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiArgs
- ApiKey string - The API key to use matching the environment. Default: ""
- ApiUrl string - The URL of the unstructured API to use. Default: "https://api.unstructured.io"
- Parameters List<SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameter> - List of parameters sent to the API
- ApiKey string - The API key to use matching the environment. Default: ""
- ApiUrl string - The URL of the unstructured API to use. Default: "https://api.unstructured.io"
- Parameters []SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameter - List of parameters sent to the API
- apiKey String - The API key to use matching the environment. Default: ""
- apiUrl String - The URL of the unstructured API to use. Default: "https://api.unstructured.io"
- parameters List<SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameter> - List of parameters sent to the API
- apiKey string - The API key to use matching the environment. Default: ""
- apiUrl string - The URL of the unstructured API to use. Default: "https://api.unstructured.io"
- parameters SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameter[] - List of parameters sent to the API
- api_key str - The API key to use matching the environment. Default: ""
- api_url str - The URL of the unstructured API to use. Default: "https://api.unstructured.io"
- parameters Sequence[SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameter] - List of parameters sent to the API
- apiKey String - The API key to use matching the environment. Default: ""
- apiUrl String - The URL of the unstructured API to use. Default: "https://api.unstructured.io"
- parameters List<Property Map> - List of parameters sent to the API
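To use the hosted unstructured API instead of local processing, the processing block selects viaApi. The sketch below uses placeholder values in the document's placeholder style; in practice the API key should come from a Pulumi secret, and the optional parameters list is omitted here:
format:
  unstructuredDocumentFormat:
    strategy: hi_res
    processing:
      viaApi:
        apiKey: '...my_unstructured_api_key...'
        apiUrl: https://api.unstructured.io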
SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameter, SourceSftpBulkConfigurationStreamFormatUnstructuredDocumentFormatProcessingViaApiParameterArgs
SourceSftpBulkResourceAllocation, SourceSftpBulkResourceAllocationArgs
- Default SourceSftpBulkResourceAllocationDefault - optional resource requirements to run workers (blank for unbounded allocations)
- JobSpecifics List<SourceSftpBulkResourceAllocationJobSpecific>
- Default SourceSftpBulkResourceAllocationDefault - optional resource requirements to run workers (blank for unbounded allocations)
- JobSpecifics []SourceSftpBulkResourceAllocationJobSpecific
- default_ SourceSftpBulkResourceAllocationDefault - optional resource requirements to run workers (blank for unbounded allocations)
- jobSpecifics List<SourceSftpBulkResourceAllocationJobSpecific>
- default SourceSftpBulkResourceAllocationDefault - optional resource requirements to run workers (blank for unbounded allocations)
- jobSpecifics SourceSftpBulkResourceAllocationJobSpecific[]
- default SourceSftpBulkResourceAllocationDefault - optional resource requirements to run workers (blank for unbounded allocations)
- job_specifics Sequence[SourceSftpBulkResourceAllocationJobSpecific]
- default Property Map - optional resource requirements to run workers (blank for unbounded allocations)
- jobSpecifics List<Property Map>
SourceSftpBulkResourceAllocationDefault, SourceSftpBulkResourceAllocationDefaultArgs
- CpuLimit string
- CpuRequest string
- EphemeralStorageLimit string
- EphemeralStorageRequest string
- MemoryLimit string
- MemoryRequest string
- CpuLimit string
- CpuRequest string
- EphemeralStorageLimit string
- EphemeralStorageRequest string
- MemoryLimit string
- MemoryRequest string
- cpuLimit String
- cpuRequest String
- ephemeralStorageLimit String
- ephemeralStorageRequest String
- memoryLimit String
- memoryRequest String
- cpuLimit string
- cpuRequest string
- ephemeralStorageLimit string
- ephemeralStorageRequest string
- memoryLimit string
- memoryRequest string
- cpu_limit str
- cpu_request str
- ephemeral_storage_limit str
- ephemeral_storage_request str
- memory_limit str
- memory_request str
- cpuLimit String
- cpuRequest String
- ephemeralStorageLimit String
- ephemeralStorageRequest String
- memoryLimit String
- memoryRequest String
SourceSftpBulkResourceAllocationJobSpecific, SourceSftpBulkResourceAllocationJobSpecificArgs
- JobType string - enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
- ResourceRequirements SourceSftpBulkResourceAllocationJobSpecificResourceRequirements - optional resource requirements to run workers (blank for unbounded allocations)
- JobType string - enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
- ResourceRequirements SourceSftpBulkResourceAllocationJobSpecificResourceRequirements - optional resource requirements to run workers (blank for unbounded allocations)
- jobType String - enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
- resourceRequirements SourceSftpBulkResourceAllocationJobSpecificResourceRequirements - optional resource requirements to run workers (blank for unbounded allocations)
- jobType string - enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
- resourceRequirements SourceSftpBulkResourceAllocationJobSpecificResourceRequirements - optional resource requirements to run workers (blank for unbounded allocations)
- job_type str - enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
- resource_requirements SourceSftpBulkResourceAllocationJobSpecificResourceRequirements - optional resource requirements to run workers (blank for unbounded allocations)
- jobType String - enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
- resourceRequirements Property Map - optional resource requirements to run workers (blank for unbounded allocations)
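Putting the allocation pieces together, a resourceAllocation value could take the following shape in YAML. The CPU and memory figures are purely illustrative, whether this field is settable depends on your Airbyte deployment, and resourceRequirements follows the type described in the next section:
resourceAllocation:
  default:
    cpuRequest: '0.5'
    cpuLimit: '1'
    memoryRequest: 1Gi
    memoryLimit: 2Gi
  jobSpecifics:
    - jobType: sync
      resourceRequirements:
        cpuLimit: '2'
        memoryLimit: 4Gi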
SourceSftpBulkResourceAllocationJobSpecificResourceRequirements, SourceSftpBulkResourceAllocationJobSpecificResourceRequirementsArgs
- CpuLimit string
- CpuRequest string
- EphemeralStorageLimit string
- EphemeralStorageRequest string
- MemoryLimit string
- MemoryRequest string
- CpuLimit string
- CpuRequest string
- EphemeralStorageLimit string
- EphemeralStorageRequest string
- MemoryLimit string
- MemoryRequest string
- cpuLimit String
- cpuRequest String
- ephemeralStorageLimit String
- ephemeralStorageRequest String
- memoryLimit String
- memoryRequest String
- cpuLimit string
- cpuRequest string
- ephemeralStorageLimit string
- ephemeralStorageRequest string
- memoryLimit string
- memoryRequest string
- cpu_limit str
- cpu_request str
- ephemeral_storage_limit str
- ephemeral_storage_request str
- memory_limit str
- memory_request str
- cpuLimit String
- cpuRequest String
- ephemeralStorageLimit String
- ephemeralStorageRequest String
- memoryLimit String
- memoryRequest String
Import
$ pulumi import airbyte:index/sourceSftpBulk:SourceSftpBulk my_airbyte_source_sftp_bulk ""
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- airbyte airbytehq/terraform-provider-airbyte
- License
- Notes
- This Pulumi package is based on the airbyte Terraform Provider.