airbyte 0.8.0-beta2 published on Thursday, Mar 27, 2025 by airbytehq

airbyte.DestinationSnowflakeCortex


    DestinationSnowflakeCortex Resource

    Example Usage
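
    The following Python sketch mirrors the Java and YAML examples below, using the dictionary-literal input form documented in the constructor reference at the end of this page:

    import pulumi_airbyte as airbyte

    # Sketch: same configuration as the Java and YAML examples below,
    # written with the Python SDK's dictionary-literal inputs.
    my_destination_snowflakecortex = airbyte.DestinationSnowflakeCortex("myDestinationSnowflakecortex",
        configuration={
            "embedding": {
                "fake": {},
            },
            "indexing": {
                "credentials": {
                    "password": "AIRBYTE_PASSWORD",
                },
                "database": "AIRBYTE_DATABASE",
                "default_schema": "AIRBYTE_SCHEMA",
                "host": "AIRBYTE_ACCOUNT",
                "role": "AIRBYTE_ROLE",
                "username": "AIRBYTE_USER",
                "warehouse": "AIRBYTE_WAREHOUSE",
            },
            "omit_raw_text": True,
            "processing": {
                "chunk_overlap": 3,
                "chunk_size": 6147,
                "field_name_mappings": [{
                    "from_field": "...my_from_field...",
                    "to_field": "...my_to_field...",
                }],
                "metadata_fields": ["..."],
                "text_fields": ["..."],
                "text_splitter": {
                    "by_separator": {
                        "keep_separator": True,
                        "separators": ["..."],
                    },
                },
            },
        },
        definition_id="4e970f65-b8a4-4398-b19e-2a5644731a72",
        workspace_id="d33dd7fd-91b5-4245-9a6e-0c987c8003c9")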

    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.airbyte.DestinationSnowflakeCortex;
    import com.pulumi.airbyte.DestinationSnowflakeCortexArgs;
    import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationArgs;
    import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationEmbeddingArgs;
    import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationEmbeddingFakeArgs;
    import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationIndexingArgs;
    import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs;
    import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingArgs;
    import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs;
    import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs;
    import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var myDestinationSnowflakecortex = new DestinationSnowflakeCortex("myDestinationSnowflakecortex", DestinationSnowflakeCortexArgs.builder()
                .configuration(DestinationSnowflakeCortexConfigurationArgs.builder()
                    .embedding(DestinationSnowflakeCortexConfigurationEmbeddingArgs.builder()
                        .fake(DestinationSnowflakeCortexConfigurationEmbeddingFakeArgs.builder().build())
                        .build())
                    .indexing(DestinationSnowflakeCortexConfigurationIndexingArgs.builder()
                        .credentials(DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs.builder()
                            .password("AIRBYTE_PASSWORD")
                            .build())
                        .database("AIRBYTE_DATABASE")
                        .defaultSchema("AIRBYTE_SCHEMA")
                        .host("AIRBYTE_ACCOUNT")
                        .role("AIRBYTE_ROLE")
                        .username("AIRBYTE_USER")
                        .warehouse("AIRBYTE_WAREHOUSE")
                        .build())
                    .omitRawText(true)
                    .processing(DestinationSnowflakeCortexConfigurationProcessingArgs.builder()
                        .chunkOverlap(3)
                        .chunkSize(6147)
                        .fieldNameMappings(DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs.builder()
                            .fromField("...my_from_field...")
                            .toField("...my_to_field...")
                            .build())
                        .metadataFields("...")
                        .textFields("...")
                        .textSplitter(DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs.builder()
                            .bySeparator(DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs.builder()
                                .keepSeparator(true)
                                .separators("...")
                                .build())
                            .build())
                        .build())
                    .build())
                .definitionId("4e970f65-b8a4-4398-b19e-2a5644731a72")
                .workspaceId("d33dd7fd-91b5-4245-9a6e-0c987c8003c9")
                .build());
    
        }
    }
    
    resources:
      myDestinationSnowflakecortex:
        type: airbyte:DestinationSnowflakeCortex
        properties:
          configuration:
            embedding:
              fake: {}
            indexing:
              credentials:
                password: AIRBYTE_PASSWORD
              database: AIRBYTE_DATABASE
              defaultSchema: AIRBYTE_SCHEMA
              host: AIRBYTE_ACCOUNT
              role: AIRBYTE_ROLE
              username: AIRBYTE_USER
              warehouse: AIRBYTE_WAREHOUSE
            omitRawText: true
            processing:
              chunkOverlap: 3
              chunkSize: 6147
              fieldNameMappings:
                - fromField: '...my_from_field...'
                  toField: '...my_to_field...'
              metadataFields:
                - '...'
              textFields:
                - '...'
              textSplitter:
                bySeparator:
                  keepSeparator: true
                  separators:
                    - '...'
          definitionId: 4e970f65-b8a4-4398-b19e-2a5644731a72
          workspaceId: d33dd7fd-91b5-4245-9a6e-0c987c8003c9
    

    Create DestinationSnowflakeCortex Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new DestinationSnowflakeCortex(name: string, args: DestinationSnowflakeCortexArgs, opts?: CustomResourceOptions);
    @overload
    def DestinationSnowflakeCortex(resource_name: str,
                                   args: DestinationSnowflakeCortexArgs,
                                   opts: Optional[ResourceOptions] = None)
    
    @overload
    def DestinationSnowflakeCortex(resource_name: str,
                                   opts: Optional[ResourceOptions] = None,
                                   configuration: Optional[DestinationSnowflakeCortexConfigurationArgs] = None,
                                   workspace_id: Optional[str] = None,
                                   definition_id: Optional[str] = None,
                                   name: Optional[str] = None)
    func NewDestinationSnowflakeCortex(ctx *Context, name string, args DestinationSnowflakeCortexArgs, opts ...ResourceOption) (*DestinationSnowflakeCortex, error)
    public DestinationSnowflakeCortex(string name, DestinationSnowflakeCortexArgs args, CustomResourceOptions? opts = null)
    public DestinationSnowflakeCortex(String name, DestinationSnowflakeCortexArgs args)
    public DestinationSnowflakeCortex(String name, DestinationSnowflakeCortexArgs args, CustomResourceOptions options)
    
    type: airbyte:DestinationSnowflakeCortex
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args DestinationSnowflakeCortexArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args DestinationSnowflakeCortexArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args DestinationSnowflakeCortexArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args DestinationSnowflakeCortexArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args DestinationSnowflakeCortexArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var destinationSnowflakeCortexResource = new Airbyte.DestinationSnowflakeCortex("destinationSnowflakeCortexResource", new()
    {
        Configuration = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationArgs
        {
            Embedding = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingArgs
            {
                AzureOpenAi = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs
                {
                    ApiBase = "string",
                    Deployment = "string",
                    OpenaiKey = "string",
                },
                Cohere = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs
                {
                    CohereKey = "string",
                },
                Fake = null,
                OpenAi = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs
                {
                    OpenaiKey = "string",
                },
                OpenAiCompatible = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs
                {
                    BaseUrl = "string",
                    Dimensions = 0,
                    ApiKey = "string",
                    ModelName = "string",
                },
            },
            Indexing = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationIndexingArgs
            {
                Credentials = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs
                {
                    Password = "string",
                },
                Database = "string",
                DefaultSchema = "string",
                Host = "string",
                Role = "string",
                Username = "string",
                Warehouse = "string",
            },
            Processing = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingArgs
            {
                ChunkSize = 0,
                ChunkOverlap = 0,
                FieldNameMappings = new[]
                {
                    new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs
                    {
                        FromField = "string",
                        ToField = "string",
                    },
                },
                MetadataFields = new[]
                {
                    "string",
                },
                TextFields = new[]
                {
                    "string",
                },
                TextSplitter = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs
                {
                    ByMarkdownHeader = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs
                    {
                        SplitLevel = 0,
                    },
                    ByProgrammingLanguage = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs
                    {
                        Language = "string",
                    },
                    BySeparator = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs
                    {
                        KeepSeparator = false,
                        Separators = new[]
                        {
                            "string",
                        },
                    },
                },
            },
            OmitRawText = false,
        },
        WorkspaceId = "string",
        DefinitionId = "string",
        Name = "string",
    });
    
    example, err := airbyte.NewDestinationSnowflakeCortex(ctx, "destinationSnowflakeCortexResource", &airbyte.DestinationSnowflakeCortexArgs{
        Configuration: &airbyte.DestinationSnowflakeCortexConfigurationArgs{
            Embedding: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingArgs{
                AzureOpenAi: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs{
                    ApiBase:    pulumi.String("string"),
                    Deployment: pulumi.String("string"),
                    OpenaiKey:  pulumi.String("string"),
                },
                Cohere: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs{
                    CohereKey: pulumi.String("string"),
                },
                Fake: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingFakeArgs{},
                OpenAi: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs{
                    OpenaiKey: pulumi.String("string"),
                },
                OpenAiCompatible: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs{
                    BaseUrl:    pulumi.String("string"),
                    Dimensions: pulumi.Float64(0),
                    ApiKey:     pulumi.String("string"),
                    ModelName:  pulumi.String("string"),
                },
            },
            Indexing: &airbyte.DestinationSnowflakeCortexConfigurationIndexingArgs{
                Credentials: &airbyte.DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs{
                    Password: pulumi.String("string"),
                },
                Database:      pulumi.String("string"),
                DefaultSchema: pulumi.String("string"),
                Host:          pulumi.String("string"),
                Role:          pulumi.String("string"),
                Username:      pulumi.String("string"),
                Warehouse:     pulumi.String("string"),
            },
            Processing: &airbyte.DestinationSnowflakeCortexConfigurationProcessingArgs{
                ChunkSize:    pulumi.Float64(0),
                ChunkOverlap: pulumi.Float64(0),
                FieldNameMappings: airbyte.DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArray{
                    &airbyte.DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs{
                        FromField: pulumi.String("string"),
                        ToField:   pulumi.String("string"),
                    },
                },
                MetadataFields: pulumi.StringArray{
                    pulumi.String("string"),
                },
                TextFields: pulumi.StringArray{
                    pulumi.String("string"),
                },
                TextSplitter: &airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs{
                    ByMarkdownHeader: &airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs{
                        SplitLevel: pulumi.Float64(0),
                    },
                    ByProgrammingLanguage: &airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs{
                        Language: pulumi.String("string"),
                    },
                    BySeparator: &airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs{
                        KeepSeparator: pulumi.Bool(false),
                        Separators: pulumi.StringArray{
                            pulumi.String("string"),
                        },
                    },
                },
            },
            OmitRawText: pulumi.Bool(false),
        },
        WorkspaceId:  pulumi.String("string"),
        DefinitionId: pulumi.String("string"),
        Name:         pulumi.String("string"),
    })
    
    var destinationSnowflakeCortexResource = new DestinationSnowflakeCortex("destinationSnowflakeCortexResource", DestinationSnowflakeCortexArgs.builder()
        .configuration(DestinationSnowflakeCortexConfigurationArgs.builder()
            .embedding(DestinationSnowflakeCortexConfigurationEmbeddingArgs.builder()
                .azureOpenAi(DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs.builder()
                    .apiBase("string")
                    .deployment("string")
                    .openaiKey("string")
                    .build())
                .cohere(DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs.builder()
                    .cohereKey("string")
                    .build())
                .fake(DestinationSnowflakeCortexConfigurationEmbeddingFakeArgs.builder().build())
                .openAi(DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs.builder()
                    .openaiKey("string")
                    .build())
                .openAiCompatible(DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs.builder()
                    .baseUrl("string")
                    .dimensions(0)
                    .apiKey("string")
                    .modelName("string")
                    .build())
                .build())
            .indexing(DestinationSnowflakeCortexConfigurationIndexingArgs.builder()
                .credentials(DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs.builder()
                    .password("string")
                    .build())
                .database("string")
                .defaultSchema("string")
                .host("string")
                .role("string")
                .username("string")
                .warehouse("string")
                .build())
            .processing(DestinationSnowflakeCortexConfigurationProcessingArgs.builder()
                .chunkSize(0)
                .chunkOverlap(0)
                .fieldNameMappings(DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs.builder()
                    .fromField("string")
                    .toField("string")
                    .build())
                .metadataFields("string")
                .textFields("string")
                .textSplitter(DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs.builder()
                    .byMarkdownHeader(DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs.builder()
                        .splitLevel(0)
                        .build())
                    .byProgrammingLanguage(DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs.builder()
                        .language("string")
                        .build())
                    .bySeparator(DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs.builder()
                        .keepSeparator(false)
                        .separators("string")
                        .build())
                    .build())
                .build())
            .omitRawText(false)
            .build())
        .workspaceId("string")
        .definitionId("string")
        .name("string")
        .build());
    
    destination_snowflake_cortex_resource = airbyte.DestinationSnowflakeCortex("destinationSnowflakeCortexResource",
        configuration={
            "embedding": {
                "azure_open_ai": {
                    "api_base": "string",
                    "deployment": "string",
                    "openai_key": "string",
                },
                "cohere": {
                    "cohere_key": "string",
                },
                "fake": {},
                "open_ai": {
                    "openai_key": "string",
                },
                "open_ai_compatible": {
                    "base_url": "string",
                    "dimensions": 0,
                    "api_key": "string",
                    "model_name": "string",
                },
            },
            "indexing": {
                "credentials": {
                    "password": "string",
                },
                "database": "string",
                "default_schema": "string",
                "host": "string",
                "role": "string",
                "username": "string",
                "warehouse": "string",
            },
            "processing": {
                "chunk_size": 0,
                "chunk_overlap": 0,
                "field_name_mappings": [{
                    "from_field": "string",
                    "to_field": "string",
                }],
                "metadata_fields": ["string"],
                "text_fields": ["string"],
                "text_splitter": {
                    "by_markdown_header": {
                        "split_level": 0,
                    },
                    "by_programming_language": {
                        "language": "string",
                    },
                    "by_separator": {
                        "keep_separator": False,
                        "separators": ["string"],
                    },
                },
            },
            "omit_raw_text": False,
        },
        workspace_id="string",
        definition_id="string",
        name="string")
    
    const destinationSnowflakeCortexResource = new airbyte.DestinationSnowflakeCortex("destinationSnowflakeCortexResource", {
        configuration: {
            embedding: {
                azureOpenAi: {
                    apiBase: "string",
                    deployment: "string",
                    openaiKey: "string",
                },
                cohere: {
                    cohereKey: "string",
                },
                fake: {},
                openAi: {
                    openaiKey: "string",
                },
                openAiCompatible: {
                    baseUrl: "string",
                    dimensions: 0,
                    apiKey: "string",
                    modelName: "string",
                },
            },
            indexing: {
                credentials: {
                    password: "string",
                },
                database: "string",
                defaultSchema: "string",
                host: "string",
                role: "string",
                username: "string",
                warehouse: "string",
            },
            processing: {
                chunkSize: 0,
                chunkOverlap: 0,
                fieldNameMappings: [{
                    fromField: "string",
                    toField: "string",
                }],
                metadataFields: ["string"],
                textFields: ["string"],
                textSplitter: {
                    byMarkdownHeader: {
                        splitLevel: 0,
                    },
                    byProgrammingLanguage: {
                        language: "string",
                    },
                    bySeparator: {
                        keepSeparator: false,
                        separators: ["string"],
                    },
                },
            },
            omitRawText: false,
        },
        workspaceId: "string",
        definitionId: "string",
        name: "string",
    });
    
    type: airbyte:DestinationSnowflakeCortex
    properties:
        configuration:
            embedding:
                azureOpenAi:
                    apiBase: string
                    deployment: string
                    openaiKey: string
                cohere:
                    cohereKey: string
                fake: {}
                openAi:
                    openaiKey: string
                openAiCompatible:
                    apiKey: string
                    baseUrl: string
                    dimensions: 0
                    modelName: string
            indexing:
                credentials:
                    password: string
                database: string
                defaultSchema: string
                host: string
                role: string
                username: string
                warehouse: string
            omitRawText: false
            processing:
                chunkOverlap: 0
                chunkSize: 0
                fieldNameMappings:
                    - fromField: string
                      toField: string
                metadataFields:
                    - string
                textFields:
                    - string
                textSplitter:
                    byMarkdownHeader:
                        splitLevel: 0
                    byProgrammingLanguage:
                        language: string
                    bySeparator:
                        keepSeparator: false
                        separators:
                            - string
        definitionId: string
        name: string
        workspaceId: string
    

    DestinationSnowflakeCortex Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
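
    For instance, the indexing credentials object can be written either way (a minimal sketch; the two forms are interchangeable):

    import pulumi_airbyte as airbyte

    # Dictionary-literal form
    credentials = {"password": "AIRBYTE_PASSWORD"}

    # Equivalent argument-class form
    credentials_args = airbyte.DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs(
        password="AIRBYTE_PASSWORD",
    )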

    The DestinationSnowflakeCortex resource accepts the following input properties:

    Configuration DestinationSnowflakeCortexConfiguration
    The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    Name string
    Name of the destination e.g. dev-mysql-instance.
    Configuration DestinationSnowflakeCortexConfigurationArgs
    The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    Name string
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationSnowflakeCortexConfiguration
    The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    name String
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationSnowflakeCortexConfiguration
    The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
    workspaceId string
    definitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    name string
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationSnowflakeCortexConfigurationArgs
    The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
    workspace_id str
    definition_id str
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    name str
    Name of the destination e.g. dev-mysql-instance.
    configuration Property Map
    The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    name String
    Name of the destination e.g. dev-mysql-instance.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the DestinationSnowflakeCortex resource produces the following output properties:

    CreatedAt double
    DestinationId string
    DestinationType string
    Id string
    The provider-assigned unique ID for this managed resource.
    ResourceAllocation DestinationSnowflakeCortexResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    CreatedAt float64
    DestinationId string
    DestinationType string
    Id string
    The provider-assigned unique ID for this managed resource.
    ResourceAllocation DestinationSnowflakeCortexResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    createdAt Double
    destinationId String
    destinationType String
    id String
    The provider-assigned unique ID for this managed resource.
    resourceAllocation DestinationSnowflakeCortexResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    createdAt number
    destinationId string
    destinationType string
    id string
    The provider-assigned unique ID for this managed resource.
    resourceAllocation DestinationSnowflakeCortexResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    created_at float
    destination_id str
    destination_type str
    id str
    The provider-assigned unique ID for this managed resource.
    resource_allocation DestinationSnowflakeCortexResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    createdAt Number
    destinationId String
    destinationType String
    id String
    The provider-assigned unique ID for this managed resource.
    resourceAllocation Property Map
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
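
    As a sketch, the provider-assigned outputs can be exported as stack outputs once the resource is created (configuration values below are the placeholders used throughout this page; the chunk_size of 1000 is an arbitrary placeholder):

    import pulumi
    import pulumi_airbyte as airbyte

    dest = airbyte.DestinationSnowflakeCortex("example",
        configuration={
            "embedding": {"fake": {}},
            "indexing": {
                "credentials": {"password": "AIRBYTE_PASSWORD"},
                "database": "AIRBYTE_DATABASE",
                "default_schema": "AIRBYTE_SCHEMA",
                "host": "AIRBYTE_ACCOUNT",
                "role": "AIRBYTE_ROLE",
                "username": "AIRBYTE_USER",
                "warehouse": "AIRBYTE_WAREHOUSE",
            },
            "processing": {"chunk_size": 1000},
        },
        workspace_id="d33dd7fd-91b5-4245-9a6e-0c987c8003c9")

    # destination_id, destination_type, and created_at are populated by the
    # provider after creation and can be exported as stack outputs.
    pulumi.export("destinationId", dest.destination_id)
    pulumi.export("destinationType", dest.destination_type)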

    Look up Existing DestinationSnowflakeCortex Resource

    Get an existing DestinationSnowflakeCortex resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: DestinationSnowflakeCortexState, opts?: CustomResourceOptions): DestinationSnowflakeCortex
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            configuration: Optional[DestinationSnowflakeCortexConfigurationArgs] = None,
            created_at: Optional[float] = None,
            definition_id: Optional[str] = None,
            destination_id: Optional[str] = None,
            destination_type: Optional[str] = None,
            name: Optional[str] = None,
            resource_allocation: Optional[DestinationSnowflakeCortexResourceAllocationArgs] = None,
            workspace_id: Optional[str] = None) -> DestinationSnowflakeCortex
    func GetDestinationSnowflakeCortex(ctx *Context, name string, id IDInput, state *DestinationSnowflakeCortexState, opts ...ResourceOption) (*DestinationSnowflakeCortex, error)
    public static DestinationSnowflakeCortex Get(string name, Input<string> id, DestinationSnowflakeCortexState? state, CustomResourceOptions? opts = null)
    public static DestinationSnowflakeCortex get(String name, Output<String> id, DestinationSnowflakeCortexState state, CustomResourceOptions options)
    resources:
      _:
        type: airbyte:DestinationSnowflakeCortex
        get:
          id: ${id}
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
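
    A Python sketch of the lookup (the ID below is a placeholder; in practice it is the destinationId of an existing destination):

    import pulumi_airbyte as airbyte

    # Adopt an existing destination's state by its provider ID.
    existing = airbyte.DestinationSnowflakeCortex.get("existing-cortex",
        id="d33dd7fd-91b5-4245-9a6e-0c987c8003c9")
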
    The following state arguments are supported:
    Configuration DestinationSnowflakeCortexConfiguration
    The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
    CreatedAt double
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    DestinationId string
    DestinationType string
    Name string
    Name of the destination e.g. dev-mysql-instance.
    ResourceAllocation DestinationSnowflakeCortexResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    WorkspaceId string
    Configuration DestinationSnowflakeCortexConfigurationArgs
    The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
    CreatedAt float64
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    DestinationId string
    DestinationType string
    Name string
    Name of the destination e.g. dev-mysql-instance.
    ResourceAllocation DestinationSnowflakeCortexResourceAllocationArgs
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    WorkspaceId string
    configuration DestinationSnowflakeCortexConfiguration
    The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
    createdAt Double
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    destinationId String
    destinationType String
    name String
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation DestinationSnowflakeCortexResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspaceId String
    configuration DestinationSnowflakeCortexConfiguration
    The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
    createdAt number
    definitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    destinationId string
    destinationType string
    name string
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation DestinationSnowflakeCortexResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspaceId string
    configuration DestinationSnowflakeCortexConfigurationArgs
    The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
    created_at float
    definition_id str
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    destination_id str
    destination_type str
    name str
    Name of the destination e.g. dev-mysql-instance.
    resource_allocation DestinationSnowflakeCortexResourceAllocationArgs
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspace_id str
    configuration Property Map
    The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
    createdAt Number
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    destinationId String
    destinationType String
    name String
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation Property Map
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspaceId String

    Supporting Types

    DestinationSnowflakeCortexConfiguration, DestinationSnowflakeCortexConfigurationArgs

    Embedding DestinationSnowflakeCortexConfigurationEmbedding
    Embedding configuration
    Indexing DestinationSnowflakeCortexConfigurationIndexing
    Snowflake can be used to store vector data and retrieve embeddings.
    Processing DestinationSnowflakeCortexConfigurationProcessing
    OmitRawText bool
    Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
    Embedding DestinationSnowflakeCortexConfigurationEmbedding
    Embedding configuration
    Indexing DestinationSnowflakeCortexConfigurationIndexing
    Snowflake can be used to store vector data and retrieve embeddings.
    Processing DestinationSnowflakeCortexConfigurationProcessing
    OmitRawText bool
    Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
    embedding DestinationSnowflakeCortexConfigurationEmbedding
    Embedding configuration
    indexing DestinationSnowflakeCortexConfigurationIndexing
    Snowflake can be used to store vector data and retrieve embeddings.
    processing DestinationSnowflakeCortexConfigurationProcessing
    omitRawText Boolean
    Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
    embedding DestinationSnowflakeCortexConfigurationEmbedding
    Embedding configuration
    indexing DestinationSnowflakeCortexConfigurationIndexing
    Snowflake can be used to store vector data and retrieve embeddings.
    processing DestinationSnowflakeCortexConfigurationProcessing
    omitRawText boolean
    Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
    embedding DestinationSnowflakeCortexConfigurationEmbedding
    Embedding configuration
    indexing DestinationSnowflakeCortexConfigurationIndexing
    Snowflake can be used to store vector data and retrieve embeddings.
    processing DestinationSnowflakeCortexConfigurationProcessing
    omit_raw_text bool
    Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
    embedding Property Map
    Embedding configuration
    indexing Property Map
    Snowflake can be used to store vector data and retrieve embeddings.
    processing Property Map
    omitRawText Boolean
    Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false

    DestinationSnowflakeCortexConfigurationEmbedding, DestinationSnowflakeCortexConfigurationEmbeddingArgs

    AzureOpenAi DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAi
    Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
    Cohere DestinationSnowflakeCortexConfigurationEmbeddingCohere
    Use the Cohere API to embed text.
    Fake DestinationSnowflakeCortexConfigurationEmbeddingFake
    Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
    OpenAi DestinationSnowflakeCortexConfigurationEmbeddingOpenAi
    Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
    OpenAiCompatible DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatible
    Use a service that's compatible with the OpenAI API to embed text.
    AzureOpenAi DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAi
    Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
    Cohere DestinationSnowflakeCortexConfigurationEmbeddingCohere
    Use the Cohere API to embed text.
    Fake DestinationSnowflakeCortexConfigurationEmbeddingFake
    Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
    OpenAi DestinationSnowflakeCortexConfigurationEmbeddingOpenAi
    Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
    OpenAiCompatible DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatible
    Use a service that's compatible with the OpenAI API to embed text.
    azureOpenAi DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAi
    Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
    cohere DestinationSnowflakeCortexConfigurationEmbeddingCohere
    Use the Cohere API to embed text.
    fake DestinationSnowflakeCortexConfigurationEmbeddingFake
    Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
    openAi DestinationSnowflakeCortexConfigurationEmbeddingOpenAi
    Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
    openAiCompatible DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatible
    Use a service that's compatible with the OpenAI API to embed text.
    azureOpenAi DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAi
    Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
    cohere DestinationSnowflakeCortexConfigurationEmbeddingCohere
    Use the Cohere API to embed text.
    fake DestinationSnowflakeCortexConfigurationEmbeddingFake
    Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
    openAi DestinationSnowflakeCortexConfigurationEmbeddingOpenAi
    Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
    openAiCompatible DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatible
    Use a service that's compatible with the OpenAI API to embed text.
    azure_open_ai DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAi
    Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
    cohere DestinationSnowflakeCortexConfigurationEmbeddingCohere
    Use the Cohere API to embed text.
    fake DestinationSnowflakeCortexConfigurationEmbeddingFake
    Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
    open_ai DestinationSnowflakeCortexConfigurationEmbeddingOpenAi
    Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
    open_ai_compatible DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatible
    Use a service that's compatible with the OpenAI API to embed text.
    azureOpenAi Property Map
    Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
    cohere Property Map
    Use the Cohere API to embed text.
    fake Property Map
    Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
    openAi Property Map
    Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
    openAiCompatible Property Map
    Use a service that's compatible with the OpenAI API to embed text.
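
    The embedding variants above behave as alternatives (a oneOf in the underlying connector spec), so a real configuration sets exactly one of them; the reference examples on this page populate all five only because they use placeholder values. A minimal Python sketch selecting the OpenAI variant (the key value is a placeholder):

    # Exactly one embedding variant is set; the rest are omitted. Here the
    # OpenAI variant (text-embedding-ada-002, 1536 dimensions).
    embedding = {
        "open_ai": {
            "openai_key": "OPENAI_API_KEY",  # placeholder
        },
    }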

    DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAi, DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs

    ApiBase string
    The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    Deployment string
    The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    OpenaiKey string
    The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    ApiBase string
    The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    Deployment string
    The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    OpenaiKey string
    The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    apiBase String
    The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    deployment String
    The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    openaiKey String
    The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    apiBase string
    The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    deployment string
    The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    openaiKey string
    The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    api_base str
    The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    deployment str
    The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    openai_key str
    The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    apiBase String
    The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    deployment String
    The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
    openaiKey String
    The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource

    DestinationSnowflakeCortexConfigurationEmbeddingCohere, DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs

    CohereKey string
    CohereKey string
    cohereKey String
    cohereKey string
    cohereKey String

    DestinationSnowflakeCortexConfigurationEmbeddingOpenAi, DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs

    OpenaiKey string
    OpenaiKey string
    openaiKey String
    openaiKey string
    openaiKey String

    DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatible, DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs

    BaseUrl string
    The base URL for your OpenAI-compatible service
    Dimensions double
    The number of dimensions the embedding model is generating
    ApiKey string
    Default: ""
    ModelName string
    The name of the model to use for embedding. Default: "text-embedding-ada-002"
    BaseUrl string
    The base URL for your OpenAI-compatible service
    Dimensions float64
    The number of dimensions the embedding model is generating
    ApiKey string
    Default: ""
    ModelName string
    The name of the model to use for embedding. Default: "text-embedding-ada-002"
    baseUrl String
    The base URL for your OpenAI-compatible service
    dimensions Double
    The number of dimensions the embedding model is generating
    apiKey String
    Default: ""
    modelName String
    The name of the model to use for embedding. Default: "text-embedding-ada-002"
    baseUrl string
    The base URL for your OpenAI-compatible service
    dimensions number
    The number of dimensions the embedding model is generating
    apiKey string
    Default: ""
    modelName string
    The name of the model to use for embedding. Default: "text-embedding-ada-002"
    base_url str
    The base URL for your OpenAI-compatible service
    dimensions float
    The number of dimensions the embedding model is generating
    api_key str
    Default: ""
    model_name str
    The name of the model to use for embedding. Default: "text-embedding-ada-002"
    baseUrl String
    The base URL for your OpenAI-compatible service
    dimensions Number
    The number of dimensions the embedding model is generating
    apiKey String
    Default: ""
    modelName String
    The name of the model to use for embedding. Default: "text-embedding-ada-002"

    DestinationSnowflakeCortexConfigurationIndexing, DestinationSnowflakeCortexConfigurationIndexingArgs

    Credentials DestinationSnowflakeCortexConfigurationIndexingCredentials
    Database string
    Enter the name of the database that you want to sync data into
    DefaultSchema string
    Enter the name of the default schema
    Host string
    Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
    Role string
    Enter the role that you want to use to access Snowflake
    Username string
    Enter the name of the user you want to use to access the database
    Warehouse string
    Enter the name of the warehouse that you want to use as a compute cluster
    Credentials DestinationSnowflakeCortexConfigurationIndexingCredentials
    Database string
    Enter the name of the database that you want to sync data into
    DefaultSchema string
    Enter the name of the default schema
    Host string
    Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
    Role string
    Enter the role that you want to use to access Snowflake
    Username string
    Enter the name of the user you want to use to access the database
    Warehouse string
    Enter the name of the warehouse that you want to use as a compute cluster
    credentials DestinationSnowflakeCortexConfigurationIndexingCredentials
    database String
    Enter the name of the database that you want to sync data into
    defaultSchema String
    Enter the name of the default schema
    host String
    Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
    role String
    Enter the role that you want to use to access Snowflake
    username String
    Enter the name of the user you want to use to access the database
    warehouse String
    Enter the name of the warehouse that you want to use as a compute cluster
    credentials DestinationSnowflakeCortexConfigurationIndexingCredentials
    database string
    Enter the name of the database that you want to sync data into
    defaultSchema string
    Enter the name of the default schema
    host string
    Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
    role string
    Enter the role that you want to use to access Snowflake
    username string
    Enter the name of the user you want to use to access the database
    warehouse string
    Enter the name of the warehouse that you want to use as a compute cluster
    credentials DestinationSnowflakeCortexConfigurationIndexingCredentials
    database str
    Enter the name of the database that you want to sync data into
    default_schema str
    Enter the name of the default schema
    host str
    Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
    role str
    Enter the role that you want to use to access Snowflake
    username str
    Enter the name of the user you want to use to access the database
    warehouse str
    Enter the name of the warehouse that you want to use as a compute cluster
    credentials Property Map
    database String
    Enter the name of the database that you want to sync data into
    defaultSchema String
    Enter the name of the default schema
    host String
    Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
    role String
    Enter the role that you want to use to access Snowflake
    username String
    Enter the name of the user you want to use to access the database
    warehouse String
    Enter the name of the warehouse that you want to use as a compute cluster
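    Putting the Python-flavored properties above together, here is a minimal hedged sketch of the indexing block. It assumes the pulumi_airbyte Python SDK exposes the args classes exactly as listed; all values are placeholders.

    import pulumi_airbyte as airbyte

    # Hedged sketch: assumes the args classes are exported at the top level
    # of pulumi_airbyte, as the Python listing above suggests.
    indexing = airbyte.DestinationSnowflakeCortexConfigurationIndexingArgs(
        host="MY_ACCOUNT",                # the identifier before .snowflakecomputing.com
        database="AIRBYTE_DATABASE",
        default_schema="AIRBYTE_SCHEMA",
        warehouse="AIRBYTE_WAREHOUSE",
        role="AIRBYTE_ROLE",
        username="AIRBYTE_USER",
        credentials=airbyte.DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs(
            password="AIRBYTE_PASSWORD",  # better kept as a Pulumi secret; see the credentials type below
        ),
    )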

    DestinationSnowflakeCortexConfigurationIndexingCredentials, DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs

    Password string
    Enter the password you want to use to access the database
    Password string
    Enter the password you want to use to access the database
    password String
    Enter the password you want to use to access the database
    password string
    Enter the password you want to use to access the database
    password str
    Enter the password you want to use to access the database
    password String
    Enter the password you want to use to access the database
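    Because the password is sensitive, a common Pulumi pattern is to read it from stack config as a secret rather than hard-coding it. A hedged sketch; the config key snowflakePassword is hypothetical:

    import pulumi
    import pulumi_airbyte as airbyte

    cfg = pulumi.Config()
    # "snowflakePassword" is a hypothetical key, set with:
    #   pulumi config set --secret snowflakePassword <value>
    credentials = airbyte.DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs(
        password=cfg.require_secret("snowflakePassword"),
    )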

    DestinationSnowflakeCortexConfigurationProcessing, DestinationSnowflakeCortexConfigurationProcessingArgs

    ChunkSize double
    Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
    ChunkOverlap double
    Size of the overlap between chunks, in tokens, stored in the vector store to better capture relevant context. Default: 0
    FieldNameMappings List<DestinationSnowflakeCortexConfigurationProcessingFieldNameMapping>
    List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
    MetadataFields List<string>
    List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all name fields in all entries of the users array. When specifying nested paths, all matching values are flattened into an array and stored in a field named after the path.
    TextFields List<string>
    List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all name fields in all entries of the users array.
    TextSplitter DestinationSnowflakeCortexConfigurationProcessingTextSplitter
    Split text fields into chunks based on the specified method.
    ChunkSize float64
    Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
    ChunkOverlap float64
    Size of the overlap between chunks, in tokens, stored in the vector store to better capture relevant context. Default: 0
    FieldNameMappings []DestinationSnowflakeCortexConfigurationProcessingFieldNameMapping
    List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
    MetadataFields []string
    List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all name fields in all entries of the users array. When specifying nested paths, all matching values are flattened into an array and stored in a field named after the path.
    TextFields []string
    List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all name fields in all entries of the users array.
    TextSplitter DestinationSnowflakeCortexConfigurationProcessingTextSplitter
    Split text fields into chunks based on the specified method.
    chunkSize Double
    Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
    chunkOverlap Double
    Size of the overlap between chunks, in tokens, stored in the vector store to better capture relevant context. Default: 0
    fieldNameMappings List<DestinationSnowflakeCortexConfigurationProcessingFieldNameMapping>
    List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
    metadataFields List<String>
    List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all name fields in all entries of the users array. When specifying nested paths, all matching values are flattened into an array and stored in a field named after the path.
    textFields List<String>
    List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all name fields in all entries of the users array.
    textSplitter DestinationSnowflakeCortexConfigurationProcessingTextSplitter
    Split text fields into chunks based on the specified method.
    chunkSize number
    Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
    chunkOverlap number
    Size of the overlap between chunks, in tokens, stored in the vector store to better capture relevant context. Default: 0
    fieldNameMappings DestinationSnowflakeCortexConfigurationProcessingFieldNameMapping[]
    List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
    metadataFields string[]
    List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all name fields in all entries of the users array. When specifying nested paths, all matching values are flattened into an array and stored in a field named after the path.
    textFields string[]
    List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all name fields in all entries of the users array.
    textSplitter DestinationSnowflakeCortexConfigurationProcessingTextSplitter
    Split text fields into chunks based on the specified method.
    chunk_size float
    Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
    chunk_overlap float
    Size of the overlap between chunks, in tokens, stored in the vector store to better capture relevant context. Default: 0
    field_name_mappings Sequence[DestinationSnowflakeCortexConfigurationProcessingFieldNameMapping]
    List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
    metadata_fields Sequence[str]
    List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all name fields in all entries of the users array. When specifying nested paths, all matching values are flattened into an array and stored in a field named after the path.
    text_fields Sequence[str]
    List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all name fields in all entries of the users array.
    text_splitter DestinationSnowflakeCortexConfigurationProcessingTextSplitter
    Split text fields into chunks based on the specified method.
    chunkSize Number
    Size of chunks in tokens to store in vector store (make sure it is not too big for the context of your LLM)
    chunkOverlap Number
    Size of the overlap between chunks, in tokens, stored in the vector store to better capture relevant context. Default: 0
    fieldNameMappings List<Property Map>
    List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
    metadataFields List<String>
    List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all name fields in all entries of the users array. When specifying nested paths, all matching values are flattened into an array and stored in a field named after the path.
    textFields List<String>
    List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. user.name will access the name field in the user object. It's also possible to use wildcards to access all fields in an object, e.g. users.*.name will access all name fields in all entries of the users array.
    textSplitter Property Map
    Split text fields into chunks based on the specified method.
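    The processing options control how records are turned into chunks before embedding. A minimal hedged sketch using the Python names listed above; the record paths and values are illustrative only:

    import pulumi_airbyte as airbyte

    processing = airbyte.DestinationSnowflakeCortexConfigurationProcessingArgs(
        chunk_size=512,      # keep well below the context window of your LLM
        chunk_overlap=50,    # defaults to 0 when omitted
        text_fields=["title", "body"],   # fields to embed (illustrative)
        metadata_fields=["user.name"],   # dot notation reaches nested fields
        field_name_mappings=[
            airbyte.DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs(
                from_field="body",       # illustrative source field name
                to_field="content",      # illustrative destination field name
            ),
        ],
    )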

    DestinationSnowflakeCortexConfigurationProcessingFieldNameMapping, DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs

    FromField string
    The field name in the source
    ToField string
    The field name to use in the destination
    FromField string
    The field name in the source
    ToField string
    The field name to use in the destination
    fromField String
    The field name in the source
    toField String
    The field name to use in the destination
    fromField string
    The field name in the source
    toField string
    The field name to use in the destination
    from_field str
    The field name in the source
    to_field str
    The field name to use in the destination
    fromField String
    The field name in the source
    toField String
    The field name to use in the destination

    DestinationSnowflakeCortexConfigurationProcessingTextSplitter, DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs

    ByMarkdownHeader DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader
    Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
    ByProgrammingLanguage DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage
    Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
    BySeparator DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator
    Split the text by the list of separators until the chunk size is reached, using the separators listed earlier where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
    ByMarkdownHeader DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader
    Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
    ByProgrammingLanguage DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage
    Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
    BySeparator DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator
    Split the text by the list of separators until the chunk size is reached, using the separators listed earlier where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
    byMarkdownHeader DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader
    Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
    byProgrammingLanguage DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage
    Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
    bySeparator DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator
    Split the text by the list of separators until the chunk size is reached, using the separators listed earlier where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
    byMarkdownHeader DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader
    Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
    byProgrammingLanguage DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage
    Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
    bySeparator DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator
    Split the text by the list of separators until the chunk size is reached, using the separators listed earlier where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
    by_markdown_header DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader
    Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
    by_programming_language DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage
    Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
    by_separator DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator
    Split the text by the list of separators until the chunk size is reached, using the separators listed earlier where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
    byMarkdownHeader Property Map
    Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
    byProgrammingLanguage Property Map
    Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
    bySeparator Property Map
    Split the text by the list of separators until the chunk size is reached, using the separators listed earlier where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.

    DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader, DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs

    SplitLevel double
    Level of Markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
    SplitLevel float64
    Level of Markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
    splitLevel Double
    Level of Markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
    splitLevel number
    Level of Markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
    split_level float
    Level of Markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
    splitLevel Number
    Level of Markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
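    As a concrete illustration, a hedged Python sketch selecting this splitter (class names follow the listing above):

    import pulumi_airbyte as airbyte

    splitter = airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs(
        by_markdown_header=airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs(
            split_level=2,  # split on headings down to level 2 (# and ##)
        ),
    )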

    DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage, DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs

    Language string
    Split code in suitable places based on the programming language. Must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
    Language string
    Split code in suitable places based on the programming language. Must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
    language String
    Split code in suitable places based on the programming language. Must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
    language string
    Split code in suitable places based on the programming language. Must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
    language str
    Split code in suitable places based on the programming language. Must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
    language String
    Split code in suitable places based on the programming language. Must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
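    The equivalent hedged sketch for code-aware splitting, picking one of the enum values above:

    import pulumi_airbyte as airbyte

    splitter = airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs(
        by_programming_language=airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs(
            language="python",  # must be one of the values listed above
        ),
    )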

    DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator, DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs

    KeepSeparator bool
    Whether to keep the separator in the resulting chunks. Default: false
    Separators List<string>
    List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
    KeepSeparator bool
    Whether to keep the separator in the resulting chunks. Default: false
    Separators []string
    List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
    keepSeparator Boolean
    Whether to keep the separator in the resulting chunks. Default: false
    separators List<String>
    List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
    keepSeparator boolean
    Whether to keep the separator in the resulting chunks. Default: false
    separators string[]
    List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
    keep_separator bool
    Whether to keep the separator in the resulting chunks. Default: false
    separators Sequence[str]
    List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
    keepSeparator Boolean
    Whether to keep the separator in the resulting chunks. Default: false
    separators List<String>
    List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
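    Note that each separator is itself a double-quoted string, so in code every list entry carries literal quote characters. A hedged Python sketch:

    import pulumi_airbyte as airbyte

    # Each entry contains literal double quotes, as the description requires:
    # '"\\n\\n"' is the literal text "\n\n" including the surrounding quotes.
    splitter = airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs(
        by_separator=airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs(
            separators=['"\\n\\n"', '"\\n"', '"."'],  # paragraphs, lines, sentences
            keep_separator=False,                     # the default
        ),
    )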

    DestinationSnowflakeCortexResourceAllocation, DestinationSnowflakeCortexResourceAllocationArgs

    default Property Map
    Optional resource requirements to run workers (blank for unbounded allocations)
    jobSpecifics List<Property Map>

    DestinationSnowflakeCortexResourceAllocationDefault, DestinationSnowflakeCortexResourceAllocationDefaultArgs

    DestinationSnowflakeCortexResourceAllocationJobSpecific, DestinationSnowflakeCortexResourceAllocationJobSpecificArgs

    JobType string
    Enum that describes the different types of jobs that the platform runs. Must be one of ["getspec", "checkconnection", "discoverschema", "sync", "resetconnection", "connection_updater", "replicate"]
    ResourceRequirements DestinationSnowflakeCortexResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations)
    JobType string
    Enum that describes the different types of jobs that the platform runs. Must be one of ["getspec", "checkconnection", "discoverschema", "sync", "resetconnection", "connection_updater", "replicate"]
    ResourceRequirements DestinationSnowflakeCortexResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations)
    jobType String
    Enum that describes the different types of jobs that the platform runs. Must be one of ["getspec", "checkconnection", "discoverschema", "sync", "resetconnection", "connection_updater", "replicate"]
    resourceRequirements DestinationSnowflakeCortexResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations)
    jobType string
    Enum that describes the different types of jobs that the platform runs. Must be one of ["getspec", "checkconnection", "discoverschema", "sync", "resetconnection", "connection_updater", "replicate"]
    resourceRequirements DestinationSnowflakeCortexResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations)
    job_type str
    Enum that describes the different types of jobs that the platform runs. Must be one of ["getspec", "checkconnection", "discoverschema", "sync", "resetconnection", "connection_updater", "replicate"]
    resource_requirements DestinationSnowflakeCortexResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations)
    jobType String
    Enum that describes the different types of jobs that the platform runs. Must be one of ["getspec", "checkconnection", "discoverschema", "sync", "resetconnection", "connection_updater", "replicate"]
    resourceRequirements Property Map
    Optional resource requirements to run workers (blank for unbounded allocations)
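    Assuming resource allocation is configurable on this resource (hedged; the sub-properties of the resource-requirements type are not listed on this page), a sketch pinning sync jobs only:

    import pulumi_airbyte as airbyte

    # Hedged: resource_requirements is omitted because its fields are not
    # documented here; other job types fall back to the default allocation.
    allocation = airbyte.DestinationSnowflakeCortexResourceAllocationArgs(
        job_specifics=[
            airbyte.DestinationSnowflakeCortexResourceAllocationJobSpecificArgs(
                job_type="sync",  # one of the enum values listed above
            ),
        ],
    )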

    DestinationSnowflakeCortexResourceAllocationJobSpecificResourceRequirements, DestinationSnowflakeCortexResourceAllocationJobSpecificResourceRequirementsArgs

    Import

    $ pulumi import airbyte:index/destinationSnowflakeCortex:DestinationSnowflakeCortex my_airbyte_destination_snowflake_cortex ""
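    The trailing empty quotes stand in for the ID of the destination to import. Airbyte destination IDs are UUIDs, so with a hypothetical placeholder ID the command takes this shape:

    $ pulumi import airbyte:index/destinationSnowflakeCortex:DestinationSnowflakeCortex my_airbyte_destination_snowflake_cortex "00000000-0000-0000-0000-000000000000"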
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    airbyte airbytehq/terraform-provider-airbyte
    License
    Notes
    This Pulumi package is based on the airbyte Terraform Provider.