airbyte.DestinationSnowflakeCortex
Explore with Pulumi AI
DestinationSnowflakeCortex Resource
Example Usage
Coming soon!
Coming soon!
Coming soon!
Coming soon!
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.airbyte.DestinationSnowflakeCortex;
import com.pulumi.airbyte.DestinationSnowflakeCortexArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationEmbeddingArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationEmbeddingFakeArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationIndexingArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs;
import com.pulumi.airbyte.inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    /**
     * Declares a single {@code airbyte:DestinationSnowflakeCortex} resource.
     *
     * <p>The configuration mirrors the vector-DB destination model: an
     * {@code embedding} provider, Snowflake {@code indexing} settings, and a
     * document {@code processing} pipeline.
     *
     * @param ctx the Pulumi deployment context supplied by {@link Pulumi#run}
     */
    public static void stack(Context ctx) {
        var myDestinationSnowflakecortex = new DestinationSnowflakeCortex("myDestinationSnowflakecortex", DestinationSnowflakeCortexArgs.builder()
            .configuration(DestinationSnowflakeCortexConfigurationArgs.builder()
                // "fake" embedding takes no settings, but the builder still
                // requires an (empty) args object — a bare .fake() does not compile.
                .embedding(DestinationSnowflakeCortexConfigurationEmbeddingArgs.builder()
                    .fake(DestinationSnowflakeCortexConfigurationEmbeddingFakeArgs.builder()
                        .build())
                    .build())
                .indexing(DestinationSnowflakeCortexConfigurationIndexingArgs.builder()
                    .credentials(DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs.builder()
                        .password("AIRBYTE_PASSWORD")
                        .build())
                    .database("AIRBYTE_DATABASE")
                    .defaultSchema("AIRBYTE_SCHEMA")
                    .host("AIRBYTE_ACCOUNT")
                    .role("AIRBYTE_ROLE")
                    .username("AIRBYTE_USER")
                    .warehouse("AIRBYTE_WAREHOUSE")
                    .build())
                // Java builder methods are camelCase: omitRawText, not omit_raw_text
                // (see the constructor reference example for this resource).
                .omitRawText(true)
                .processing(DestinationSnowflakeCortexConfigurationProcessingArgs.builder()
                    .chunkOverlap(3)
                    .chunkSize(6147)
                    .fieldNameMappings(DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs.builder()
                        .fromField("...my_from_field...")
                        .toField("...my_to_field...")
                        .build())
                    .metadataFields("...")
                    .textFields("...")
                    .textSplitter(DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs.builder()
                        .bySeparator(DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs.builder()
                            .keepSeparator(true)
                            .separators("...")
                            .build())
                        .build())
                    .build())
                .build())
            .definitionId("4e970f65-b8a4-4398-b19e-2a5644731a72")
            .workspaceId("d33dd7fd-91b5-4245-9a6e-0c987c8003c9")
            .build());
    }
}
resources:
myDestinationSnowflakecortex:
type: airbyte:DestinationSnowflakeCortex
properties:
configuration:
embedding:
fake: {}
indexing:
credentials:
password: AIRBYTE_PASSWORD
database: AIRBYTE_DATABASE
defaultSchema: AIRBYTE_SCHEMA
host: AIRBYTE_ACCOUNT
role: AIRBYTE_ROLE
username: AIRBYTE_USER
warehouse: AIRBYTE_WAREHOUSE
omitRawText: true
processing:
chunkOverlap: 3
chunkSize: 6147
fieldNameMappings:
- fromField: '...my_from_field...'
toField: '...my_to_field...'
metadataFields:
- '...'
textFields:
- '...'
textSplitter:
bySeparator:
keepSeparator: true
separators:
- '...'
definitionId: 4e970f65-b8a4-4398-b19e-2a5644731a72
workspaceId: d33dd7fd-91b5-4245-9a6e-0c987c8003c9
Create DestinationSnowflakeCortex Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new DestinationSnowflakeCortex(name: string, args: DestinationSnowflakeCortexArgs, opts?: CustomResourceOptions);
@overload
def DestinationSnowflakeCortex(resource_name: str,
args: DestinationSnowflakeCortexArgs,
opts: Optional[ResourceOptions] = None)
@overload
def DestinationSnowflakeCortex(resource_name: str,
opts: Optional[ResourceOptions] = None,
configuration: Optional[DestinationSnowflakeCortexConfigurationArgs] = None,
workspace_id: Optional[str] = None,
definition_id: Optional[str] = None,
name: Optional[str] = None)
func NewDestinationSnowflakeCortex(ctx *Context, name string, args DestinationSnowflakeCortexArgs, opts ...ResourceOption) (*DestinationSnowflakeCortex, error)
public DestinationSnowflakeCortex(string name, DestinationSnowflakeCortexArgs args, CustomResourceOptions? opts = null)
public DestinationSnowflakeCortex(String name, DestinationSnowflakeCortexArgs args)
public DestinationSnowflakeCortex(String name, DestinationSnowflakeCortexArgs args, CustomResourceOptions options)
type: airbyte:DestinationSnowflakeCortex
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args DestinationSnowflakeCortexArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args DestinationSnowflakeCortexArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args DestinationSnowflakeCortexArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DestinationSnowflakeCortexArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args DestinationSnowflakeCortexArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var destinationSnowflakeCortexResource = new Airbyte.DestinationSnowflakeCortex("destinationSnowflakeCortexResource", new()
{
Configuration = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationArgs
{
Embedding = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingArgs
{
AzureOpenAi = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs
{
ApiBase = "string",
Deployment = "string",
OpenaiKey = "string",
},
Cohere = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs
{
CohereKey = "string",
},
Fake = null,
OpenAi = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs
{
OpenaiKey = "string",
},
OpenAiCompatible = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs
{
BaseUrl = "string",
Dimensions = 0,
ApiKey = "string",
ModelName = "string",
},
},
Indexing = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationIndexingArgs
{
Credentials = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs
{
Password = "string",
},
Database = "string",
DefaultSchema = "string",
Host = "string",
Role = "string",
Username = "string",
Warehouse = "string",
},
Processing = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingArgs
{
ChunkSize = 0,
ChunkOverlap = 0,
FieldNameMappings = new[]
{
new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs
{
FromField = "string",
ToField = "string",
},
},
MetadataFields = new[]
{
"string",
},
TextFields = new[]
{
"string",
},
TextSplitter = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs
{
ByMarkdownHeader = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs
{
SplitLevel = 0,
},
ByProgrammingLanguage = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs
{
Language = "string",
},
BySeparator = new Airbyte.Inputs.DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs
{
KeepSeparator = false,
Separators = new[]
{
"string",
},
},
},
},
OmitRawText = false,
},
WorkspaceId = "string",
DefinitionId = "string",
Name = "string",
});
// Reference example: every input uses a placeholder value. The generated
// docs dropped the `airbyte` package qualifier from the struct literals
// (`&.Foo{}` is not valid Go); it is restored here.
example, err := airbyte.NewDestinationSnowflakeCortex(ctx, "destinationSnowflakeCortexResource", &airbyte.DestinationSnowflakeCortexArgs{
	Configuration: &airbyte.DestinationSnowflakeCortexConfigurationArgs{
		Embedding: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingArgs{
			AzureOpenAi: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs{
				ApiBase:    pulumi.String("string"),
				Deployment: pulumi.String("string"),
				OpenaiKey:  pulumi.String("string"),
			},
			Cohere: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs{
				CohereKey: pulumi.String("string"),
			},
			// "fake" embedding has no fields; an empty args struct selects it.
			Fake: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingFakeArgs{},
			OpenAi: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs{
				OpenaiKey: pulumi.String("string"),
			},
			OpenAiCompatible: &airbyte.DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs{
				BaseUrl:    pulumi.String("string"),
				Dimensions: pulumi.Float64(0),
				ApiKey:     pulumi.String("string"),
				ModelName:  pulumi.String("string"),
			},
		},
		Indexing: &airbyte.DestinationSnowflakeCortexConfigurationIndexingArgs{
			Credentials: &airbyte.DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs{
				Password: pulumi.String("string"),
			},
			Database:      pulumi.String("string"),
			DefaultSchema: pulumi.String("string"),
			Host:          pulumi.String("string"),
			Role:          pulumi.String("string"),
			Username:      pulumi.String("string"),
			Warehouse:     pulumi.String("string"),
		},
		Processing: &airbyte.DestinationSnowflakeCortexConfigurationProcessingArgs{
			ChunkSize:    pulumi.Float64(0),
			ChunkOverlap: pulumi.Float64(0),
			FieldNameMappings: airbyte.DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArray{
				&airbyte.DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs{
					FromField: pulumi.String("string"),
					ToField:   pulumi.String("string"),
				},
			},
			MetadataFields: pulumi.StringArray{
				pulumi.String("string"),
			},
			TextFields: pulumi.StringArray{
				pulumi.String("string"),
			},
			TextSplitter: &airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs{
				ByMarkdownHeader: &airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs{
					SplitLevel: pulumi.Float64(0),
				},
				ByProgrammingLanguage: &airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs{
					Language: pulumi.String("string"),
				},
				BySeparator: &airbyte.DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs{
					KeepSeparator: pulumi.Bool(false),
					Separators: pulumi.StringArray{
						pulumi.String("string"),
					},
				},
			},
		},
		OmitRawText: pulumi.Bool(false),
	},
	WorkspaceId:  pulumi.String("string"),
	DefinitionId: pulumi.String("string"),
	Name:         pulumi.String("string"),
})
var destinationSnowflakeCortexResource = new DestinationSnowflakeCortex("destinationSnowflakeCortexResource", DestinationSnowflakeCortexArgs.builder()
.configuration(DestinationSnowflakeCortexConfigurationArgs.builder()
.embedding(DestinationSnowflakeCortexConfigurationEmbeddingArgs.builder()
.azureOpenAi(DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs.builder()
.apiBase("string")
.deployment("string")
.openaiKey("string")
.build())
.cohere(DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs.builder()
.cohereKey("string")
.build())
.fake()
.openAi(DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs.builder()
.openaiKey("string")
.build())
.openAiCompatible(DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs.builder()
.baseUrl("string")
.dimensions(0)
.apiKey("string")
.modelName("string")
.build())
.build())
.indexing(DestinationSnowflakeCortexConfigurationIndexingArgs.builder()
.credentials(DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs.builder()
.password("string")
.build())
.database("string")
.defaultSchema("string")
.host("string")
.role("string")
.username("string")
.warehouse("string")
.build())
.processing(DestinationSnowflakeCortexConfigurationProcessingArgs.builder()
.chunkSize(0)
.chunkOverlap(0)
.fieldNameMappings(DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs.builder()
.fromField("string")
.toField("string")
.build())
.metadataFields("string")
.textFields("string")
.textSplitter(DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs.builder()
.byMarkdownHeader(DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs.builder()
.splitLevel(0)
.build())
.byProgrammingLanguage(DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs.builder()
.language("string")
.build())
.bySeparator(DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs.builder()
.keepSeparator(false)
.separators("string")
.build())
.build())
.build())
.omitRawText(false)
.build())
.workspaceId("string")
.definitionId("string")
.name("string")
.build());
destination_snowflake_cortex_resource = airbyte.DestinationSnowflakeCortex("destinationSnowflakeCortexResource",
configuration={
"embedding": {
"azure_open_ai": {
"api_base": "string",
"deployment": "string",
"openai_key": "string",
},
"cohere": {
"cohere_key": "string",
},
"fake": {},
"open_ai": {
"openai_key": "string",
},
"open_ai_compatible": {
"base_url": "string",
"dimensions": 0,
"api_key": "string",
"model_name": "string",
},
},
"indexing": {
"credentials": {
"password": "string",
},
"database": "string",
"default_schema": "string",
"host": "string",
"role": "string",
"username": "string",
"warehouse": "string",
},
"processing": {
"chunk_size": 0,
"chunk_overlap": 0,
"field_name_mappings": [{
"from_field": "string",
"to_field": "string",
}],
"metadata_fields": ["string"],
"text_fields": ["string"],
"text_splitter": {
"by_markdown_header": {
"split_level": 0,
},
"by_programming_language": {
"language": "string",
},
"by_separator": {
"keep_separator": False,
"separators": ["string"],
},
},
},
"omit_raw_text": False,
},
workspace_id="string",
definition_id="string",
name="string")
const destinationSnowflakeCortexResource = new airbyte.DestinationSnowflakeCortex("destinationSnowflakeCortexResource", {
configuration: {
embedding: {
azureOpenAi: {
apiBase: "string",
deployment: "string",
openaiKey: "string",
},
cohere: {
cohereKey: "string",
},
fake: {},
openAi: {
openaiKey: "string",
},
openAiCompatible: {
baseUrl: "string",
dimensions: 0,
apiKey: "string",
modelName: "string",
},
},
indexing: {
credentials: {
password: "string",
},
database: "string",
defaultSchema: "string",
host: "string",
role: "string",
username: "string",
warehouse: "string",
},
processing: {
chunkSize: 0,
chunkOverlap: 0,
fieldNameMappings: [{
fromField: "string",
toField: "string",
}],
metadataFields: ["string"],
textFields: ["string"],
textSplitter: {
byMarkdownHeader: {
splitLevel: 0,
},
byProgrammingLanguage: {
language: "string",
},
bySeparator: {
keepSeparator: false,
separators: ["string"],
},
},
},
omitRawText: false,
},
workspaceId: "string",
definitionId: "string",
name: "string",
});
type: airbyte:DestinationSnowflakeCortex
properties:
configuration:
embedding:
azureOpenAi:
apiBase: string
deployment: string
openaiKey: string
cohere:
cohereKey: string
fake: {}
openAi:
openaiKey: string
openAiCompatible:
apiKey: string
baseUrl: string
dimensions: 0
modelName: string
indexing:
credentials:
password: string
database: string
defaultSchema: string
host: string
role: string
username: string
warehouse: string
omitRawText: false
processing:
chunkOverlap: 0
chunkSize: 0
fieldNameMappings:
- fromField: string
toField: string
metadataFields:
- string
textFields:
- string
textSplitter:
byMarkdownHeader:
splitLevel: 0
byProgrammingLanguage:
language: string
bySeparator:
keepSeparator: false
separators:
- string
definitionId: string
name: string
workspaceId: string
DestinationSnowflakeCortex Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The DestinationSnowflakeCortex resource accepts the following input properties:
- Configuration
Destination
Snowflake Cortex Configuration - The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- Workspace
Id string - Definition
Id string - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- Name string
- Name of the destination e.g. dev-mysql-instance.
- Configuration
Destination
Snowflake Cortex Configuration Args - The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- Workspace
Id string - Definition
Id string - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- Name string
- Name of the destination e.g. dev-mysql-instance.
- configuration
Destination
Snowflake Cortex Configuration - The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- workspace
Id String - definition
Id String - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- name String
- Name of the destination e.g. dev-mysql-instance.
- configuration
Destination
Snowflake Cortex Configuration - The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- workspace
Id string - definition
Id string - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- name string
- Name of the destination e.g. dev-mysql-instance.
- configuration
Destination
Snowflake Cortex Configuration Args - The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- workspace_
id str - definition_
id str - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- name str
- Name of the destination e.g. dev-mysql-instance.
- configuration Property Map
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- workspace
Id String - definition
Id String - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- name String
- Name of the destination e.g. dev-mysql-instance.
Outputs
All input properties are implicitly available as output properties. Additionally, the DestinationSnowflakeCortex resource produces the following output properties:
- Created
At double - Destination
Id string - Destination
Type string - Id string
- The provider-assigned unique ID for this managed resource.
- Resource
Allocation DestinationSnowflake Cortex Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overriden by the job type specific configurations. if not set, the platform will use defaults. these values will be overriden by configuration at the connection level.
- Created
At float64 - Destination
Id string - Destination
Type string - Id string
- The provider-assigned unique ID for this managed resource.
- Resource
Allocation DestinationSnowflake Cortex Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overriden by the job type specific configurations. if not set, the platform will use defaults. these values will be overriden by configuration at the connection level.
- created
At Double - destination
Id String - destination
Type String - id String
- The provider-assigned unique ID for this managed resource.
- resource
Allocation DestinationSnowflake Cortex Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overriden by the job type specific configurations. if not set, the platform will use defaults. these values will be overriden by configuration at the connection level.
- created
At number - destination
Id string - destination
Type string - id string
- The provider-assigned unique ID for this managed resource.
- resource
Allocation DestinationSnowflake Cortex Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overriden by the job type specific configurations. if not set, the platform will use defaults. these values will be overriden by configuration at the connection level.
- created_
at float - destination_
id str - destination_
type str - id str
- The provider-assigned unique ID for this managed resource.
- resource_
allocation DestinationSnowflake Cortex Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overriden by the job type specific configurations. if not set, the platform will use defaults. these values will be overriden by configuration at the connection level.
- created
At Number - destination
Id String - destination
Type String - id String
- The provider-assigned unique ID for this managed resource.
- resource
Allocation Property Map - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overriden by the job type specific configurations. if not set, the platform will use defaults. these values will be overriden by configuration at the connection level.
Look up Existing DestinationSnowflakeCortex Resource
Get an existing DestinationSnowflakeCortex resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: DestinationSnowflakeCortexState, opts?: CustomResourceOptions): DestinationSnowflakeCortex
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
configuration: Optional[DestinationSnowflakeCortexConfigurationArgs] = None,
created_at: Optional[float] = None,
definition_id: Optional[str] = None,
destination_id: Optional[str] = None,
destination_type: Optional[str] = None,
name: Optional[str] = None,
resource_allocation: Optional[DestinationSnowflakeCortexResourceAllocationArgs] = None,
workspace_id: Optional[str] = None) -> DestinationSnowflakeCortex
func GetDestinationSnowflakeCortex(ctx *Context, name string, id IDInput, state *DestinationSnowflakeCortexState, opts ...ResourceOption) (*DestinationSnowflakeCortex, error)
public static DestinationSnowflakeCortex Get(string name, Input<string> id, DestinationSnowflakeCortexState? state, CustomResourceOptions? opts = null)
public static DestinationSnowflakeCortex get(String name, Output<String> id, DestinationSnowflakeCortexState state, CustomResourceOptions options)
resources: _: type: airbyte:DestinationSnowflakeCortex get: id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Configuration
Destination
Snowflake Cortex Configuration - The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- Created
At double - Definition
Id string - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- Destination
Id string - Destination
Type string - Name string
- Name of the destination e.g. dev-mysql-instance.
- Resource
Allocation DestinationSnowflake Cortex Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overriden by the job type specific configurations. if not set, the platform will use defaults. these values will be overriden by configuration at the connection level.
- Workspace
Id string
- Configuration
Destination
Snowflake Cortex Configuration Args - The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- Created
At float64 - Definition
Id string - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- Destination
Id string - Destination
Type string - Name string
- Name of the destination e.g. dev-mysql-instance.
- Resource
Allocation DestinationSnowflake Cortex Resource Allocation Args - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overriden by the job type specific configurations. if not set, the platform will use defaults. these values will be overriden by configuration at the connection level.
- Workspace
Id string
- configuration
Destination
Snowflake Cortex Configuration - The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- created
At Double - definition
Id String - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- destination
Id String - destination
Type String - name String
- Name of the destination e.g. dev-mysql-instance.
- resource
Allocation DestinationSnowflake Cortex Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overriden by the job type specific configurations. if not set, the platform will use defaults. these values will be overriden by configuration at the connection level.
- workspace
Id String
- configuration
Destination
Snowflake Cortex Configuration - The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- created
At number - definition
Id string - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- destination
Id string - destination
Type string - name string
- Name of the destination e.g. dev-mysql-instance.
- resource
Allocation DestinationSnowflake Cortex Resource Allocation - actor or actor definition specific resource requirements. if default is set, these are the requirements that should be set for ALL jobs run for this actor definition. it is overriden by the job type specific configurations. if not set, the platform will use defaults. these values will be overriden by configuration at the connection level.
- workspace
Id string
- configuration
Destination
Snowflake Cortex Configuration Args - The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- created_
at float - definition_
id str - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- destination_
id str - destination_
type str - name str
- Name of the destination e.g. dev-mysql-instance.
- resource_allocation DestinationSnowflakeCortexResourceAllocationArgs - actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- workspace_
id str
- configuration Property Map
- The configuration model for the Vector DB based destinations. This model is used to generate the UI for the destination configuration, as well as to provide type safety for the configuration passed to the destination.
- created
At Number - definition
Id String - The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
- destination
Id String - destination
Type String - name String
- Name of the destination e.g. dev-mysql-instance.
- resourceAllocation Property Map - actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
- workspace
Id String
Supporting Types
DestinationSnowflakeCortexConfiguration, DestinationSnowflakeCortexConfigurationArgs
- Embedding
Destination
Snowflake Cortex Configuration Embedding - Embedding configuration
- Indexing
Destination
Snowflake Cortex Configuration Indexing - Snowflake can be used to store vector data and retrieve embeddings.
- Processing DestinationSnowflakeCortexConfigurationProcessing
- OmitRawText bool - Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
- Embedding
Destination
Snowflake Cortex Configuration Embedding - Embedding configuration
- Indexing
Destination
Snowflake Cortex Configuration Indexing - Snowflake can be used to store vector data and retrieve embeddings.
- Processing DestinationSnowflakeCortexConfigurationProcessing
- OmitRawText bool - Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
- embedding
Destination
Snowflake Cortex Configuration Embedding - Embedding configuration
- indexing
Destination
Snowflake Cortex Configuration Indexing - Snowflake can be used to store vector data and retrieve embeddings.
- processing DestinationSnowflakeCortexConfigurationProcessing
- omitRawText Boolean - Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
- embedding
Destination
Snowflake Cortex Configuration Embedding - Embedding configuration
- indexing
Destination
Snowflake Cortex Configuration Indexing - Snowflake can be used to store vector data and retrieve embeddings.
- processing DestinationSnowflakeCortexConfigurationProcessing
- omitRawText boolean - Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
- embedding
Destination
Snowflake Cortex Configuration Embedding - Embedding configuration
- indexing
Destination
Snowflake Cortex Configuration Indexing - Snowflake can be used to store vector data and retrieve embeddings.
- processing DestinationSnowflakeCortexConfigurationProcessing
- omit_raw_text bool - Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
- embedding Property Map
- Embedding configuration
- indexing Property Map
- Snowflake can be used to store vector data and retrieve embeddings.
- processing Property Map
- omitRawText Boolean - Do not store the text that gets embedded along with the vector and the metadata in the destination. If set to true, only the vector and the metadata will be stored - in this case raw text for LLM use cases needs to be retrieved from another source. Default: false
DestinationSnowflakeCortexConfigurationEmbedding, DestinationSnowflakeCortexConfigurationEmbeddingArgs
- AzureOpenAi DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAi - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- Cohere
Destination
Snowflake Cortex Configuration Embedding Cohere - Use the Cohere API to embed text.
- Fake
Destination
Snowflake Cortex Configuration Embedding Fake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
- Open
Ai DestinationSnowflake Cortex Configuration Embedding Open Ai - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- Open
Ai DestinationCompatible Snowflake Cortex Configuration Embedding Open Ai Compatible - Use a service that's compatible with the OpenAI API to embed text.
- Azure
Open DestinationAi Snowflake Cortex Configuration Embedding Azure Open Ai - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- Cohere
Destination
Snowflake Cortex Configuration Embedding Cohere - Use the Cohere API to embed text.
- Fake
Destination
Snowflake Cortex Configuration Embedding Fake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
- Open
Ai DestinationSnowflake Cortex Configuration Embedding Open Ai - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- Open
Ai DestinationCompatible Snowflake Cortex Configuration Embedding Open Ai Compatible - Use a service that's compatible with the OpenAI API to embed text.
- azure
Open DestinationAi Snowflake Cortex Configuration Embedding Azure Open Ai - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- cohere
Destination
Snowflake Cortex Configuration Embedding Cohere - Use the Cohere API to embed text.
- fake
Destination
Snowflake Cortex Configuration Embedding Fake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
- open
Ai DestinationSnowflake Cortex Configuration Embedding Open Ai - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- open
Ai DestinationCompatible Snowflake Cortex Configuration Embedding Open Ai Compatible - Use a service that's compatible with the OpenAI API to embed text.
- azure
Open DestinationAi Snowflake Cortex Configuration Embedding Azure Open Ai - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- cohere
Destination
Snowflake Cortex Configuration Embedding Cohere - Use the Cohere API to embed text.
- fake
Destination
Snowflake Cortex Configuration Embedding Fake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
- open
Ai DestinationSnowflake Cortex Configuration Embedding Open Ai - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- open
Ai DestinationCompatible Snowflake Cortex Configuration Embedding Open Ai Compatible - Use a service that's compatible with the OpenAI API to embed text.
- azure_
open_ Destinationai Snowflake Cortex Configuration Embedding Azure Open Ai - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- cohere
Destination
Snowflake Cortex Configuration Embedding Cohere - Use the Cohere API to embed text.
- fake
Destination
Snowflake Cortex Configuration Embedding Fake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
- open_
ai DestinationSnowflake Cortex Configuration Embedding Open Ai - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- open_
ai_ Destinationcompatible Snowflake Cortex Configuration Embedding Open Ai Compatible - Use a service that's compatible with the OpenAI API to embed text.
- azure
Open Property MapAi - Use the Azure-hosted OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- cohere Property Map
- Use the Cohere API to embed text.
- fake Property Map
- Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.
- open
Ai Property Map - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.
- open
Ai Property MapCompatible - Use a service that's compatible with the OpenAI API to embed text.
DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAi, DestinationSnowflakeCortexConfigurationEmbeddingAzureOpenAiArgs
- Api
Base string - The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- Deployment string
- The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- Openai
Key string - The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- Api
Base string - The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- Deployment string
- The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- Openai
Key string - The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- api
Base String - The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- deployment String
- The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- openai
Key String - The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- api
Base string - The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- deployment string
- The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- openai
Key string - The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- api_
base str - The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- deployment str
- The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- openai_
key str - The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- api
Base String - The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- deployment String
- The deployment for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
- openai
Key String - The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource
DestinationSnowflakeCortexConfigurationEmbeddingCohere, DestinationSnowflakeCortexConfigurationEmbeddingCohereArgs
- Cohere
Key string
- Cohere
Key string
- cohere
Key String
- cohere
Key string
- cohere_
key str
- cohere
Key String
DestinationSnowflakeCortexConfigurationEmbeddingOpenAi, DestinationSnowflakeCortexConfigurationEmbeddingOpenAiArgs
- Openai
Key string
- Openai
Key string
- openai
Key String
- openai
Key string
- openai_
key str
- openai
Key String
DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatible, DestinationSnowflakeCortexConfigurationEmbeddingOpenAiCompatibleArgs
- Base
Url string - The base URL for your OpenAI-compatible service
- Dimensions double
- The number of dimensions the embedding model is generating
- Api
Key string - Default: ""
- Model
Name string - The name of the model to use for embedding. Default: "text-embedding-ada-002"
- Base
Url string - The base URL for your OpenAI-compatible service
- Dimensions float64
- The number of dimensions the embedding model is generating
- Api
Key string - Default: ""
- Model
Name string - The name of the model to use for embedding. Default: "text-embedding-ada-002"
- base
Url String - The base URL for your OpenAI-compatible service
- dimensions Double
- The number of dimensions the embedding model is generating
- api
Key String - Default: ""
- model
Name String - The name of the model to use for embedding. Default: "text-embedding-ada-002"
- base
Url string - The base URL for your OpenAI-compatible service
- dimensions number
- The number of dimensions the embedding model is generating
- api
Key string - Default: ""
- model
Name string - The name of the model to use for embedding. Default: "text-embedding-ada-002"
- base_
url str - The base URL for your OpenAI-compatible service
- dimensions float
- The number of dimensions the embedding model is generating
- api_
key str - Default: ""
- model_
name str - The name of the model to use for embedding. Default: "text-embedding-ada-002"
- base
Url String - The base URL for your OpenAI-compatible service
- dimensions Number
- The number of dimensions the embedding model is generating
- api
Key String - Default: ""
- model
Name String - The name of the model to use for embedding. Default: "text-embedding-ada-002"
DestinationSnowflakeCortexConfigurationIndexing, DestinationSnowflakeCortexConfigurationIndexingArgs
- Credentials
Destination
Snowflake Cortex Configuration Indexing Credentials - Database string
- Enter the name of the database that you want to sync data into
- Default
Schema string - Enter the name of the default schema
- Host string
- Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
- Role string
- Enter the role that you want to use to access Snowflake
- Username string
- Enter the name of the user you want to use to access the database
- Warehouse string
- Enter the name of the warehouse that you want to use as a compute cluster
- Credentials
Destination
Snowflake Cortex Configuration Indexing Credentials - Database string
- Enter the name of the database that you want to sync data into
- Default
Schema string - Enter the name of the default schema
- Host string
- Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
- Role string
- Enter the role that you want to use to access Snowflake
- Username string
- Enter the name of the user you want to use to access the database
- Warehouse string
- Enter the name of the warehouse that you want to use as a compute cluster
- credentials
Destination
Snowflake Cortex Configuration Indexing Credentials - database String
- Enter the name of the database that you want to sync data into
- default
Schema String - Enter the name of the default schema
- host String
- Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
- role String
- Enter the role that you want to use to access Snowflake
- username String
- Enter the name of the user you want to use to access the database
- warehouse String
- Enter the name of the warehouse that you want to use as a compute cluster
- credentials
Destination
Snowflake Cortex Configuration Indexing Credentials - database string
- Enter the name of the database that you want to sync data into
- default
Schema string - Enter the name of the default schema
- host string
- Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
- role string
- Enter the role that you want to use to access Snowflake
- username string
- Enter the name of the user you want to use to access the database
- warehouse string
- Enter the name of the warehouse that you want to use as a compute cluster
- credentials
Destination
Snowflake Cortex Configuration Indexing Credentials - database str
- Enter the name of the database that you want to sync data into
- default_
schema str - Enter the name of the default schema
- host str
- Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
- role str
- Enter the role that you want to use to access Snowflake
- username str
- Enter the name of the user you want to use to access the database
- warehouse str
- Enter the name of the warehouse that you want to use as a compute cluster
- credentials Property Map
- database String
- Enter the name of the database that you want to sync data into
- default
Schema String - Enter the name of the default schema
- host String
- Enter the account name you want to use to access the database. This is usually the identifier before .snowflakecomputing.com
- role String
- Enter the role that you want to use to access Snowflake
- username String
- Enter the name of the user you want to use to access the database
- warehouse String
- Enter the name of the warehouse that you want to use as a compute cluster
DestinationSnowflakeCortexConfigurationIndexingCredentials, DestinationSnowflakeCortexConfigurationIndexingCredentialsArgs
- Password string
- Enter the password you want to use to access the database
- Password string
- Enter the password you want to use to access the database
- password String
- Enter the password you want to use to access the database
- password string
- Enter the password you want to use to access the database
- password str
- Enter the password you want to use to access the database
- password String
- Enter the password you want to use to access the database
DestinationSnowflakeCortexConfigurationProcessing, DestinationSnowflakeCortexConfigurationProcessingArgs
- ChunkSize double - Size of chunks in tokens to store in vector store (make sure it is not too big for the context window of your LLM)
- ChunkOverlap double - Size of overlap between chunks in tokens to store in vector store to better capture relevant context. Default: 0
- FieldNameMappings List<DestinationSnowflakeCortexConfigurationProcessingFieldNameMapping> - List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
- MetadataFields List<string> - List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.
- TextFields List<string> - List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array.
- TextSplitter DestinationSnowflakeCortexConfigurationProcessingTextSplitter - Split text fields into chunks based on the specified method.
- ChunkSize float64 - Size of chunks in tokens to store in vector store (make sure it is not too big for the context window of your LLM)
- ChunkOverlap float64 - Size of overlap between chunks in tokens to store in vector store to better capture relevant context. Default: 0
- Field
Name []DestinationMappings Snowflake Cortex Configuration Processing Field Name Mapping - List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
- Metadata
Fields []string - List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g.
user.name
will access thename
field in theuser
object. It's also possible to use wildcards to access all fields in an object, e.g.users.*.name
will access allnames
fields in all entries of theusers
array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. - Text
Fields []string - List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g.
user.name
will access thename
field in theuser
object. It's also possible to use wildcards to access all fields in an object, e.g.users.*.name
will access allnames
fields in all entries of theusers
array. - Text
Splitter DestinationSnowflake Cortex Configuration Processing Text Splitter - Split text fields into chunks based on the specified method.
- chunkSize Double - Size of chunks in tokens to store in vector store (make sure it is not too big for the context window of your LLM)
- chunkOverlap Double - Size of overlap between chunks in tokens to store in vector store to better capture relevant context. Default: 0
- field
Name List<DestinationMappings Snowflake Cortex Configuration Processing Field Name Mapping> - List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
- metadata
Fields List<String> - List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g.
user.name
will access thename
field in theuser
object. It's also possible to use wildcards to access all fields in an object, e.g.users.*.name
will access allnames
fields in all entries of theusers
array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. - text
Fields List<String> - List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g.
user.name
will access thename
field in theuser
object. It's also possible to use wildcards to access all fields in an object, e.g.users.*.name
will access allnames
fields in all entries of theusers
array. - text
Splitter DestinationSnowflake Cortex Configuration Processing Text Splitter - Split text fields into chunks based on the specified method.
- chunkSize number - Size of chunks in tokens to store in vector store (make sure it is not too big for the context window of your LLM)
- chunkOverlap number - Size of overlap between chunks in tokens to store in vector store to better capture relevant context. Default: 0
- field
Name DestinationMappings Snowflake Cortex Configuration Processing Field Name Mapping[] - List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
- metadata
Fields string[] - List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g.
user.name
will access thename
field in theuser
object. It's also possible to use wildcards to access all fields in an object, e.g.users.*.name
will access allnames
fields in all entries of theusers
array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. - text
Fields string[] - List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g.
user.name
will access thename
field in theuser
object. It's also possible to use wildcards to access all fields in an object, e.g.users.*.name
will access allnames
fields in all entries of theusers
array. - text
Splitter DestinationSnowflake Cortex Configuration Processing Text Splitter - Split text fields into chunks based on the specified method.
- chunk_size float - Size of chunks in tokens to store in vector store (make sure it is not too big for the context window of your LLM)
- chunk_overlap float - Size of overlap between chunks in tokens to store in vector store to better capture relevant context. Default: 0
- field_
name_ Sequence[Destinationmappings Snowflake Cortex Configuration Processing Field Name Mapping] - List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
- metadata_
fields Sequence[str] - List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g.
user.name
will access thename
field in theuser
object. It's also possible to use wildcards to access all fields in an object, e.g.users.*.name
will access allnames
fields in all entries of theusers
array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. - text_
fields Sequence[str] - List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g.
user.name
will access thename
field in theuser
object. It's also possible to use wildcards to access all fields in an object, e.g.users.*.name
will access allnames
fields in all entries of theusers
array. - text_
splitter DestinationSnowflake Cortex Configuration Processing Text Splitter - Split text fields into chunks based on the specified method.
- chunkSize Number - Size of chunks in tokens to store in vector store (make sure it is not too big for the context window of your LLM)
- chunkOverlap Number - Size of overlap between chunks in tokens to store in vector store to better capture relevant context. Default: 0
- field
Name List<Property Map>Mappings - List of fields to rename. Not applicable for nested fields, but can be used to rename fields already flattened via dot notation.
- metadata
Fields List<String> - List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g.
user.name
will access thename
field in theuser
object. It's also possible to use wildcards to access all fields in an object, e.g.users.*.name
will access allnames
fields in all entries of theusers
array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. - text
Fields List<String> - List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g.
user.name
will access thename
field in theuser
object. It's also possible to use wildcards to access all fields in an object, e.g.users.*.name
will access allnames
fields in all entries of theusers
array. - text
Splitter Property Map - Split text fields into chunks based on the specified method.
DestinationSnowflakeCortexConfigurationProcessingFieldNameMapping, DestinationSnowflakeCortexConfigurationProcessingFieldNameMappingArgs
- from_
field str - The field name in the source
- to_
field str - The field name to use in the destination
DestinationSnowflakeCortexConfigurationProcessingTextSplitter, DestinationSnowflakeCortexConfigurationProcessingTextSplitterArgs
- ByMarkdownHeader DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
- ByProgrammingLanguage DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
- BySeparator DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
- By
Markdown DestinationHeader Snowflake Cortex Configuration Processing Text Splitter By Markdown Header - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
- By
Programming DestinationLanguage Snowflake Cortex Configuration Processing Text Splitter By Programming Language - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
- By
Separator DestinationSnowflake Cortex Configuration Processing Text Splitter By Separator - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
- by
Markdown DestinationHeader Snowflake Cortex Configuration Processing Text Splitter By Markdown Header - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
- by
Programming DestinationLanguage Snowflake Cortex Configuration Processing Text Splitter By Programming Language - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
- by
Separator DestinationSnowflake Cortex Configuration Processing Text Splitter By Separator - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
- by
Markdown DestinationHeader Snowflake Cortex Configuration Processing Text Splitter By Markdown Header - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
- by
Programming DestinationLanguage Snowflake Cortex Configuration Processing Text Splitter By Programming Language - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
- by
Separator DestinationSnowflake Cortex Configuration Processing Text Splitter By Separator - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
- by_
markdown_ Destinationheader Snowflake Cortex Configuration Processing Text Splitter By Markdown Header - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
- by_
programming_ Destinationlanguage Snowflake Cortex Configuration Processing Text Splitter By Programming Language - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
- by_
separator DestinationSnowflake Cortex Configuration Processing Text Splitter By Separator - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
- by
Markdown Property MapHeader - Split the text by Markdown headers down to the specified header level. If the chunk size fits multiple sections, they will be combined into a single chunk.
- by
Programming Property MapLanguage - Split the text by suitable delimiters based on the programming language. This is useful for splitting code into chunks.
- by
Separator Property Map - Split the text by the list of separators until the chunk size is reached, using the earlier mentioned separators where possible. This is useful for splitting text fields by paragraphs, sentences, words, etc.
DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeader, DestinationSnowflakeCortexConfigurationProcessingTextSplitterByMarkdownHeaderArgs
- Split
Level double - Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
- Split
Level float64 - Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
- split
Level Double - Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
- split
Level number - Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
- split_
level float - Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
- split
Level Number - Level of markdown headers to split text fields by. Headings down to the specified level will be used as split points. Default: 1
DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguage, DestinationSnowflakeCortexConfigurationProcessingTextSplitterByProgrammingLanguageArgs
- Language string
- Split code in suitable places based on the programming language. must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
- Language string
- Split code in suitable places based on the programming language. must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
- language String
- Split code in suitable places based on the programming language. must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
- language string
- Split code in suitable places based on the programming language. must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
- language str
- Split code in suitable places based on the programming language. must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
- language String
- Split code in suitable places based on the programming language. must be one of ["cpp", "go", "java", "js", "php", "proto", "python", "rst", "ruby", "rust", "scala", "swift", "markdown", "latex", "html", "sol"]
DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparator, DestinationSnowflakeCortexConfigurationProcessingTextSplitterBySeparatorArgs
- Keep
Separator bool - Whether to keep the separator in the resulting chunks. Default: false
- Separators List<string>
- List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
- Keep
Separator bool - Whether to keep the separator in the resulting chunks. Default: false
- Separators []string
- List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
- keep
Separator Boolean - Whether to keep the separator in the resulting chunks. Default: false
- separators List<String>
- List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
- keep
Separator boolean - Whether to keep the separator in the resulting chunks. Default: false
- separators string[]
- List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
- keep_
separator bool - Whether to keep the separator in the resulting chunks. Default: false
- separators Sequence[str]
- List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
- keep
Separator Boolean - Whether to keep the separator in the resulting chunks. Default: false
- separators List<String>
- List of separator strings to split text fields by. The separator itself needs to be wrapped in double quotes, e.g. to split by the dot character, use ".". To split by a newline, use "\n".
DestinationSnowflakeCortexResourceAllocation, DestinationSnowflakeCortexResourceAllocationArgs
- Default Destination Snowflake Cortex Resource Allocation Default - optional resource requirements to run workers (blank for unbounded allocations)
- Job Specifics List<Destination Snowflake Cortex Resource Allocation Job Specific>
- Default Destination Snowflake Cortex Resource Allocation Default - optional resource requirements to run workers (blank for unbounded allocations)
- Job Specifics []Destination Snowflake Cortex Resource Allocation Job Specific
- default_ Destination Snowflake Cortex Resource Allocation Default - optional resource requirements to run workers (blank for unbounded allocations)
- job Specifics List<Destination Snowflake Cortex Resource Allocation Job Specific>
- default Destination Snowflake Cortex Resource Allocation Default - optional resource requirements to run workers (blank for unbounded allocations)
- job Specifics Destination Snowflake Cortex Resource Allocation Job Specific[]
- default Destination Snowflake Cortex Resource Allocation Default - optional resource requirements to run workers (blank for unbounded allocations)
- job_specifics Sequence[Destination Snowflake Cortex Resource Allocation Job Specific]
- default Property Map
- optional resource requirements to run workers (blank for unbounded allocations)
- job Specifics List<Property Map>
DestinationSnowflakeCortexResourceAllocationDefault, DestinationSnowflakeCortexResourceAllocationDefaultArgs
- Cpu Limit string
- Cpu Request string
- Ephemeral Storage Limit string
- Ephemeral Storage Request string
- Memory Limit string
- Memory Request string
- Cpu Limit string
- Cpu Request string
- Ephemeral Storage Limit string
- Ephemeral Storage Request string
- Memory Limit string
- Memory Request string
- cpu Limit String
- cpu Request String
- ephemeral Storage Limit String
- ephemeral Storage Request String
- memory Limit String
- memory Request String
- cpu Limit string
- cpu Request string
- ephemeral Storage Limit string
- ephemeral Storage Request string
- memory Limit string
- memory Request string
- cpu_limit str
- cpu_request str
- ephemeral_storage_limit str
- ephemeral_storage_request str
- memory_limit str
- memory_request str
- cpu Limit String
- cpu Request String
- ephemeral Storage Limit String
- ephemeral Storage Request String
- memory Limit String
- memory Request String
DestinationSnowflakeCortexResourceAllocationJobSpecific, DestinationSnowflakeCortexResourceAllocationJobSpecificArgs
- Job Type string - enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
- Resource Requirements Destination Snowflake Cortex Resource Allocation Job Specific Resource Requirements - optional resource requirements to run workers (blank for unbounded allocations)
- Job Type string - enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
- Resource Requirements Destination Snowflake Cortex Resource Allocation Job Specific Resource Requirements - optional resource requirements to run workers (blank for unbounded allocations)
- job Type String - enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
- resource Requirements Destination Snowflake Cortex Resource Allocation Job Specific Resource Requirements - optional resource requirements to run workers (blank for unbounded allocations)
- job Type string - enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
- resource Requirements Destination Snowflake Cortex Resource Allocation Job Specific Resource Requirements - optional resource requirements to run workers (blank for unbounded allocations)
- job_type str - enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
- resource_requirements Destination Snowflake Cortex Resource Allocation Job Specific Resource Requirements - optional resource requirements to run workers (blank for unbounded allocations)
- job Type String - enum that describes the different types of jobs that the platform runs. must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
- resource Requirements Property Map - optional resource requirements to run workers (blank for unbounded allocations)
DestinationSnowflakeCortexResourceAllocationJobSpecificResourceRequirements, DestinationSnowflakeCortexResourceAllocationJobSpecificResourceRequirementsArgs
- Cpu Limit string
- Cpu Request string
- Ephemeral Storage Limit string
- Ephemeral Storage Request string
- Memory Limit string
- Memory Request string
- Cpu Limit string
- Cpu Request string
- Ephemeral Storage Limit string
- Ephemeral Storage Request string
- Memory Limit string
- Memory Request string
- cpu Limit String
- cpu Request String
- ephemeral Storage Limit String
- ephemeral Storage Request String
- memory Limit String
- memory Request String
- cpu Limit string
- cpu Request string
- ephemeral Storage Limit string
- ephemeral Storage Request string
- memory Limit string
- memory Request string
- cpu_limit str
- cpu_request str
- ephemeral_storage_limit str
- ephemeral_storage_request str
- memory_limit str
- memory_request str
- cpu Limit String
- cpu Request String
- ephemeral Storage Limit String
- ephemeral Storage Request String
- memory Limit String
- memory Request String
Import
$ pulumi import airbyte:index/destinationSnowflakeCortex:DestinationSnowflakeCortex my_airbyte_destination_snowflake_cortex ""
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- airbyte airbytehq/terraform-provider-airbyte
- License
- Notes
- This Pulumi package is based on the
airbyte
Terraform Provider.