Commit 396700dd authored by chenzk's avatar chenzk
Browse files

v1.0

parents
Pipeline #2603 failed with stages
in 0 seconds
---
title: "MySQL datasource Configuration"
description: "Fast, reliable, scalable open-source relational database management system."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "MySQLParameters",
"description": "Fast, reliable, scalable open-source relational database management system.",
"documentationUrl": null,
"parameters": [
{
"name": "host",
"type": "string",
"required": true,
"description": "Database host, e.g., localhost"
},
{
"name": "port",
"type": "integer",
"required": true,
"description": "Database port, e.g., 3306"
},
{
"name": "user",
"type": "string",
"required": true,
"description": "Database user to connect"
},
{
"name": "database",
"type": "string",
"required": true,
"description": "Database name"
},
{
"name": "driver",
"type": "string",
"required": false,
"description": "Driver name for MySQL, default is mysql+pymysql.",
"defaultValue": "mysql+pymysql"
},
{
"name": "password",
"type": "string",
"required": false,
"description": "Database password, you can write your password directly, of course, you can also use environment variables, such as ${env:DBGPT_DB_PASSWORD}",
"defaultValue": "${env:DBGPT_DB_PASSWORD}"
},
{
"name": "pool_size",
"type": "integer",
"required": false,
"description": "Connection pool size, default 5",
"defaultValue": "5"
},
{
"name": "max_overflow",
"type": "integer",
"required": false,
"description": "Max overflow connections, default 10",
"defaultValue": "10"
},
{
"name": "pool_timeout",
"type": "integer",
"required": false,
"description": "Connection pool timeout, default 30",
"defaultValue": "30"
},
{
"name": "pool_recycle",
"type": "integer",
"required": false,
"description": "Connection pool recycle, default 3600",
"defaultValue": "3600"
},
{
"name": "pool_pre_ping",
"type": "boolean",
"required": false,
"description": "Connection pool pre ping, default True",
"defaultValue": "True"
}
]
}} />
---
title: "OceanBase datasource Configuration"
description: "An Ultra-Fast & Cost-Effective Distributed SQL Database."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "OceanBaseParameters",
"description": "An Ultra-Fast & Cost-Effective Distributed SQL Database.",
"documentationUrl": null,
"parameters": [
{
"name": "host",
"type": "string",
"required": true,
"description": "Database host, e.g., localhost"
},
{
"name": "port",
"type": "integer",
"required": true,
"description": "Database port, e.g., 3306"
},
{
"name": "user",
"type": "string",
"required": true,
"description": "Database user to connect"
},
{
"name": "database",
"type": "string",
"required": true,
"description": "Database name"
},
{
"name": "driver",
"type": "string",
"required": false,
"description": "Driver name for oceanbase, default is mysql+ob.",
"defaultValue": "mysql+ob"
},
{
"name": "password",
"type": "string",
"required": false,
"description": "Database password, you can write your password directly, of course, you can also use environment variables, such as ${env:DBGPT_DB_PASSWORD}",
"defaultValue": "${env:DBGPT_DB_PASSWORD}"
},
{
"name": "pool_size",
"type": "integer",
"required": false,
"description": "Connection pool size, default 5",
"defaultValue": "5"
},
{
"name": "max_overflow",
"type": "integer",
"required": false,
"description": "Max overflow connections, default 10",
"defaultValue": "10"
},
{
"name": "pool_timeout",
"type": "integer",
"required": false,
"description": "Connection pool timeout, default 30",
"defaultValue": "30"
},
{
"name": "pool_recycle",
"type": "integer",
"required": false,
"description": "Connection pool recycle, default 3600",
"defaultValue": "3600"
},
{
"name": "pool_pre_ping",
"type": "boolean",
"required": false,
"description": "Connection pool pre ping, default True",
"defaultValue": "True"
}
]
}} />
---
title: "PostgreSQL datasource Configuration"
description: "Powerful open-source relational database with extensibility and SQL standards."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "PostgreSQLParameters",
"description": "Powerful open-source relational database with extensibility and SQL standards.",
"documentationUrl": null,
"parameters": [
{
"name": "host",
"type": "string",
"required": true,
"description": "Database host, e.g., localhost"
},
{
"name": "port",
"type": "integer",
"required": true,
"description": "Database port, e.g., 5432"
},
{
"name": "user",
"type": "string",
"required": true,
"description": "Database user to connect"
},
{
"name": "database",
"type": "string",
"required": true,
"description": "Database name"
},
{
"name": "driver",
"type": "string",
"required": false,
"description": "Driver name for postgres, default is postgresql+psycopg2.",
"defaultValue": "postgresql+psycopg2"
},
{
"name": "password",
"type": "string",
"required": false,
"description": "Database password, you can write your password directly, of course, you can also use environment variables, such as ${env:DBGPT_DB_PASSWORD}",
"defaultValue": "${env:DBGPT_DB_PASSWORD}"
},
{
"name": "pool_size",
"type": "integer",
"required": false,
"description": "Connection pool size, default 5",
"defaultValue": "5"
},
{
"name": "max_overflow",
"type": "integer",
"required": false,
"description": "Max overflow connections, default 10",
"defaultValue": "10"
},
{
"name": "pool_timeout",
"type": "integer",
"required": false,
"description": "Connection pool timeout, default 30",
"defaultValue": "30"
},
{
"name": "pool_recycle",
"type": "integer",
"required": false,
"description": "Connection pool recycle, default 3600",
"defaultValue": "3600"
},
{
"name": "pool_pre_ping",
"type": "boolean",
"required": false,
"description": "Connection pool pre ping, default True",
"defaultValue": "True"
},
{
"name": "schema",
"type": "string",
"required": false,
"description": "Database schema, defaults to 'public'",
"defaultValue": "public"
}
]
}} />
---
title: "Apache Spark datasource Configuration"
description: "Unified engine for large-scale data analytics."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "SparkParameters",
"description": "Unified engine for large-scale data analytics.",
"documentationUrl": null,
"parameters": [
{
"name": "path",
"type": "string",
"required": true,
"description": "The file path of the data source."
}
]
}} />
---
title: "SQLite datasource Configuration"
description: "Lightweight embedded relational database with simplicity and portability."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "SQLiteConnectorParameters",
"description": "Lightweight embedded relational database with simplicity and portability.",
"documentationUrl": null,
"parameters": [
{
"name": "path",
"type": "string",
"required": true,
"description": "SQLite database file path. Use ':memory:' for in-memory database"
},
{
"name": "check_same_thread",
"type": "boolean",
"required": false,
"description": "Check same thread or not, default is False. Set False to allow sharing connection across threads",
"defaultValue": "False"
},
{
"name": "driver",
"type": "string",
"required": false,
"description": "Driver name, default is sqlite",
"defaultValue": "sqlite"
}
]
}} />
---
title: "StarRocks datasource Configuration"
description: "An Open-Source, High-Performance Analytical Database."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "StarRocksParameters",
"description": "An Open-Source, High-Performance Analytical Database.",
"documentationUrl": null,
"parameters": [
{
"name": "host",
"type": "string",
"required": true,
"description": "Database host, e.g., localhost"
},
{
"name": "port",
"type": "integer",
"required": true,
"description": "Database port, e.g., 3306"
},
{
"name": "user",
"type": "string",
"required": true,
"description": "Database user to connect"
},
{
"name": "database",
"type": "string",
"required": true,
"description": "Database name"
},
{
"name": "driver",
"type": "string",
"required": false,
"description": "Driver name for starrocks, default is starrocks.",
"defaultValue": "starrocks"
},
{
"name": "password",
"type": "string",
"required": false,
"description": "Database password, you can write your password directly, of course, you can also use environment variables, such as ${env:DBGPT_DB_PASSWORD}",
"defaultValue": "${env:DBGPT_DB_PASSWORD}"
},
{
"name": "pool_size",
"type": "integer",
"required": false,
"description": "Connection pool size, default 5",
"defaultValue": "5"
},
{
"name": "max_overflow",
"type": "integer",
"required": false,
"description": "Max overflow connections, default 10",
"defaultValue": "10"
},
{
"name": "pool_timeout",
"type": "integer",
"required": false,
"description": "Connection pool timeout, default 30",
"defaultValue": "30"
},
{
"name": "pool_recycle",
"type": "integer",
"required": false,
"description": "Connection pool recycle, default 3600",
"defaultValue": "3600"
},
{
"name": "pool_pre_ping",
"type": "boolean",
"required": false,
"description": "Connection pool pre ping, default True",
"defaultValue": "True"
}
]
}} />
---
title: "TuGraph datasource Configuration"
description: "TuGraph is a high-performance graph database jointly developed by Ant Group and Tsinghua University."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "TuGraphParameters",
"description": "TuGraph is a high-performance graph database jointly developed by Ant Group and Tsinghua University.",
"documentationUrl": null,
"parameters": [
{
"name": "host",
"type": "string",
"required": true,
"description": "TuGraph server host"
},
{
"name": "user",
"type": "string",
"required": true,
"description": "TuGraph server user"
},
{
"name": "password",
"type": "string",
"required": false,
"description": "Database password, you can write your password directly, of course, you can also use environment variables, such as ${env:DBGPT_DB_PASSWORD}",
"defaultValue": "${env:DBGPT_DB_PASSWORD}"
},
{
"name": "port",
"type": "integer",
"required": false,
"description": "TuGraph server port, default 7687",
"defaultValue": "7687"
},
{
"name": "database",
"type": "string",
"required": false,
"description": "Database name, default 'default'",
"defaultValue": "default"
}
]
}} />
---
title: "Vertica datasource Configuration"
description: "Vertica is a strongly consistent, ACID-compliant, SQL data warehouse, built for the scale and complexity of today's data-driven world."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "VerticaParameters",
"description": "Vertica is a strongly consistent, ACID-compliant, SQL data warehouse, built for the scale and complexity of today's data-driven world.",
"documentationUrl": null,
"parameters": [
{
"name": "host",
"type": "string",
"required": true,
"description": "Database host, e.g., localhost"
},
{
"name": "port",
"type": "integer",
"required": true,
"description": "Database port, e.g., 5433"
},
{
"name": "user",
"type": "string",
"required": true,
"description": "Database user to connect"
},
{
"name": "database",
"type": "string",
"required": true,
"description": "Database name"
},
{
"name": "driver",
"type": "string",
"required": false,
"description": "Driver name for vertica, default is vertica+vertica_python",
"defaultValue": "vertica+vertica_python"
},
{
"name": "password",
"type": "string",
"required": false,
"description": "Database password, you can write your password directly, of course, you can also use environment variables, such as ${env:DBGPT_DB_PASSWORD}",
"defaultValue": "${env:DBGPT_DB_PASSWORD}"
},
{
"name": "pool_size",
"type": "integer",
"required": false,
"description": "Connection pool size, default 5",
"defaultValue": "5"
},
{
"name": "max_overflow",
"type": "integer",
"required": false,
"description": "Max overflow connections, default 10",
"defaultValue": "10"
},
{
"name": "pool_timeout",
"type": "integer",
"required": false,
"description": "Connection pool timeout, default 30",
"defaultValue": "30"
},
{
"name": "pool_recycle",
"type": "integer",
"required": false,
"description": "Connection pool recycle, default 3600",
"defaultValue": "3600"
},
{
"name": "pool_pre_ping",
"type": "boolean",
"required": false,
"description": "Connection pool pre ping, default True",
"defaultValue": "True"
}
]
}} />
---
title: "datasource"
description: "datasource Configuration"
---
# datasource Configuration
This document provides an overview of all configuration classes in datasource type.
import { ConfigClassTable } from '@site/src/components/mdx/ConfigClassTable';
## Configuration Classes
<ConfigClassTable classes={[
{
"name": "ClickhouseParameters",
"description": "Columnar database for high-performance analytics and real-time queries.",
"link": "./conn_clickhouse_clickhouseparameters_4a1237"
},
{
"name": "DorisParameters",
"description": "A new-generation open-source real-time data warehouse.",
"link": "./conn_doris_dorisparameters_e33c53"
},
{
"name": "DuckDbConnectorParameters",
"description": "In-memory analytical database with efficient query processing.",
"link": "./conn_duckdb_duckdbconnectorparameters_c672c7"
},
{
"name": "HiveParameters",
"description": "A distributed fault-tolerant data warehouse system.",
"link": "./conn_hive_hiveparameters_ec3601"
},
{
"name": "MSSQLParameters",
"description": "Powerful, scalable, secure relational database system by Microsoft.",
"link": "./conn_mssql_mssqlparameters_d79d1c"
},
{
"name": "MySQLParameters",
"description": "Fast, reliable, scalable open-source relational database management system.",
"link": "./conn_mysql_mysqlparameters_4393c4"
},
{
"name": "OceanBaseParameters",
"description": "An Ultra-Fast & Cost-Effective Distributed SQL Database.",
"link": "./conn_oceanbase_oceanbaseparameters_260d2d"
},
{
"name": "PostgreSQLParameters",
"description": "Powerful open-source relational database with extensibility and SQL standards.",
"link": "./conn_postgresql_postgresqlparameters_22efa5"
},
{
"name": "RDBMSDatasourceParameters",
"description": "RDBMS datasource parameters.",
"link": "./base_rdbmsdatasourceparameters_4f774f"
},
{
"name": "SQLiteConnectorParameters",
"description": "Lightweight embedded relational database with simplicity and portability.",
"link": "./conn_sqlite_sqliteconnectorparameters_82c8b5"
},
{
"name": "SparkParameters",
"description": "Unified engine for large-scale data analytics.",
"link": "./conn_spark_sparkparameters_174bbc"
},
{
"name": "StarRocksParameters",
"description": "An Open-Source, High-Performance Analytical Database.",
"link": "./conn_starrocks_starrocksparameters_e511f7"
},
{
"name": "TuGraphParameters",
"description": "TuGraph is a high-performance graph database jointly developed by Ant Group and Tsinghua University.",
"link": "./conn_tugraph_tugraphparameters_0c844e"
},
{
"name": "VerticaParameters",
"description": "Vertica is a strongly consistent, ACID-compliant, SQL data warehouse, built for the scale and complexity of today's data-driven world.",
"link": "./conn_vertica_verticaparameters_c712b8"
},
]} />
---
title: "HFEmbeddingDeployModelParameters Configuration"
description: "HFEmbeddingDeployModelParameters(name: str, provider: str = 'hf', verbose: Optional[bool] = False, concurrency: Optional[int] = 100, path: Optional[str] = None, device: Optional[str] = None, cache_folder: Optional[str] = None, normalize_embeddings: bool = False, multi_process: bool = False, model_kwargs: Dict[str, Any] = <factory>, encode_kwargs: Dict[str, Any] = <factory>, embed_instruction: Optional[str] = None, query_instruction: Optional[str] = None)"
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "HFEmbeddingDeployModelParameters",
"description": "HFEmbeddingDeployModelParameters(name: str, provider: str = 'hf', verbose: Optional[bool] = False, concurrency: Optional[int] = 100, path: Optional[str] = None, device: Optional[str] = None, cache_folder: Optional[str] = None, normalize_embeddings: bool = False, multi_process: bool = False, model_kwargs: Dict[str, Any] = <factory>, encode_kwargs: Dict[str, Any] = <factory>, embed_instruction: Optional[str] = None, query_instruction: Optional[str] = None)",
"documentationUrl": "",
"parameters": [
{
"name": "name",
"type": "string",
"required": true,
"description": "The name of the model."
},
{
"name": "path",
"type": "string",
"required": false,
"description": "The path of the model, if you want to deploy a local model."
},
{
"name": "device",
"type": "string",
"required": false,
"description": "Device to run model. If None, the device is automatically determined"
},
{
"name": "provider",
"type": "string",
"required": false,
"description": "The provider of the model. If model is deployed in local, this is the inference type. If model is deployed in third-party service, this is platform name('proxy/<platform>')",
"defaultValue": "hf"
},
{
"name": "verbose",
"type": "boolean",
"required": false,
"description": "Show verbose output.",
"defaultValue": "False"
},
{
"name": "concurrency",
"type": "integer",
"required": false,
"description": "Model concurrency limit",
"defaultValue": "100"
},
{
"name": "cache_folder",
"type": "string",
"required": false,
"description": "Path of the cache folder."
},
{
"name": "normalize_embeddings",
"type": "boolean",
"required": false,
"description": "Normalize embeddings.",
"defaultValue": "False"
},
{
"name": "multi_process",
"type": "boolean",
"required": false,
"description": "Run encode() on multiple GPUs.",
"defaultValue": "False"
},
{
"name": "model_kwargs",
"type": "object",
"required": false,
"description": "Keyword arguments to pass to the model.",
"defaultValue": "{}"
},
{
"name": "encode_kwargs",
"type": "object",
"required": false,
"description": "Keyword arguments to pass when calling the `encode` method.",
"defaultValue": "{}"
},
{
"name": "embed_instruction",
"type": "string",
"required": false,
"description": "Instruction to use for embedding documents. Just for Instructor model."
},
{
"name": "query_instruction",
"type": "string",
"required": false,
"description": "Instruction to use for embedding query. Just for Instructor model."
}
]
}} />
---
title: "OpenAPIEmbeddingDeployModelParameters Configuration"
description: "OpenAPI embedding deploy model parameters."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "OpenAPIEmbeddingDeployModelParameters",
"description": "OpenAPI embedding deploy model parameters.",
"documentationUrl": "",
"parameters": [
{
"name": "name",
"type": "string",
"required": true,
"description": "The name of the model."
},
{
"name": "provider",
"type": "string",
"required": false,
"description": "The provider of the model. If model is deployed in local, this is the inference type. If model is deployed in third-party service, this is platform name('proxy/<platform>')",
"defaultValue": "proxy/openai"
},
{
"name": "verbose",
"type": "boolean",
"required": false,
"description": "Show verbose output.",
"defaultValue": "False"
},
{
"name": "concurrency",
"type": "integer",
"required": false,
"description": "Model concurrency limit",
"defaultValue": "100"
},
{
"name": "api_url",
"type": "string",
"required": false,
"description": "The URL of the embeddings API.",
"defaultValue": "http://localhost:8100/api/v1/embeddings"
},
{
"name": "api_key",
"type": "string",
"required": false,
"description": "The API key for the embeddings API."
},
{
"name": "backend",
"type": "string",
"required": false,
"description": "The real model name to pass to the provider, default is None. If backend is None, use name as the real model name."
},
{
"name": "timeout",
"type": "integer",
"required": false,
"description": "The timeout for the request in seconds.",
"defaultValue": "60"
}
]
}} />
---
title: "embedding"
description: "embedding Configuration"
---
# embedding Configuration
This document provides an overview of all configuration classes in embedding type.
import { ConfigClassTable } from '@site/src/components/mdx/ConfigClassTable';
## Configuration Classes
<ConfigClassTable classes={[
{
"name": "HFEmbeddingDeployModelParameters",
"description": "HFEmbeddingDeployModelParameters(name: str, provider: str = 'hf', verbose: Optional[bool] = False, concurrency: Optional[int] = 100, path: Optional[str] = None, device: Optional[str] = None, cache_folder: Optional[str] = None, normalize_embeddings: bool = False, multi_process: bool = False, model_kwargs: Dict[str, Any] = <factory>, encode_kwargs: Dict[str, Any] = <factory>, embed_instruction: Optional[str] = None, query_instruction: Optional[str] = None)",
"link": "./embeddings_hfembeddingdeploymodelparameters_f588e1"
},
{
"name": "JinaEmbeddingsDeployModelParameters",
"description": "Jina AI Embeddings deploy model parameters.",
"link": "./jina_jinaembeddingsdeploymodelparameters_40b0f2"
},
{
"name": "OllamaEmbeddingDeployModelParameters",
"description": "Ollama Embeddings deploy model parameters.",
"link": "./ollama_ollamaembeddingdeploymodelparameters_b511e0"
},
{
"name": "OpenAPIEmbeddingDeployModelParameters",
"description": "OpenAPI embedding deploy model parameters.",
"link": "./embeddings_openapiembeddingdeploymodelparameters_f9ba47"
},
{
"name": "QianfanEmbeddingDeployModelParameters",
"description": "Qianfan Embeddings deploy model parameters.",
"link": "./qianfan_qianfanembeddingdeploymodelparameters_257d2a"
},
{
"name": "TongyiEmbeddingDeployModelParameters",
"description": "Tongyi Embeddings deploy model parameters.",
"link": "./tongyi_tongyiembeddingdeploymodelparameters_a7cbb4"
},
]} />
---
title: "JinaEmbeddingsDeployModelParameters Configuration"
description: "Jina AI Embeddings deploy model parameters."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "JinaEmbeddingsDeployModelParameters",
"description": "Jina AI Embeddings deploy model parameters.",
"documentationUrl": "",
"parameters": [
{
"name": "name",
"type": "string",
"required": true,
"description": "The name of the model."
},
{
"name": "provider",
"type": "string",
"required": false,
"description": "The provider of the model. If model is deployed in local, this is the inference type. If model is deployed in third-party service, this is platform name('proxy/<platform>')",
"defaultValue": "proxy/jina"
},
{
"name": "verbose",
"type": "boolean",
"required": false,
"description": "Show verbose output.",
"defaultValue": "False"
},
{
"name": "concurrency",
"type": "integer",
"required": false,
"description": "Model concurrency limit",
"defaultValue": "100"
},
{
"name": "api_url",
"type": "string",
"required": false,
"description": "The URL of the embeddings API.",
"defaultValue": "https://api.jina.ai/v1/embeddings"
},
{
"name": "api_key",
"type": "string",
"required": false,
"description": "The API key for the embeddings API."
},
{
"name": "backend",
"type": "string",
"required": false,
"description": "The real model name to pass to the provider, default is None. If backend is None, use name as the real model name.",
"defaultValue": "jina-embeddings-v2-base-en"
},
{
"name": "timeout",
"type": "integer",
"required": false,
"description": "The timeout for the request in seconds.",
"defaultValue": "60"
}
]
}} />
---
title: "OllamaEmbeddingDeployModelParameters Configuration"
description: "Ollama Embeddings deploy model parameters."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "OllamaEmbeddingDeployModelParameters",
"description": "Ollama Embeddings deploy model parameters.",
"documentationUrl": "",
"parameters": [
{
"name": "name",
"type": "string",
"required": true,
"description": "The name of the model."
},
{
"name": "provider",
"type": "string",
"required": false,
"description": "The provider of the model. If model is deployed in local, this is the inference type. If model is deployed in third-party service, this is platform name('proxy/<platform>')",
"defaultValue": "proxy/ollama"
},
{
"name": "verbose",
"type": "boolean",
"required": false,
"description": "Show verbose output.",
"defaultValue": "False"
},
{
"name": "concurrency",
"type": "integer",
"required": false,
"description": "Model concurrency limit",
"defaultValue": "100"
},
{
"name": "api_url",
"type": "string",
"required": false,
"description": "The URL of the embeddings API.",
"defaultValue": "http://localhost:11434"
},
{
"name": "backend",
"type": "string",
"required": false,
"description": "The real model name to pass to the provider, default is None. If backend is None, use name as the real model name."
}
]
}} />
---
title: "QianfanEmbeddingDeployModelParameters Configuration"
description: "Qianfan Embeddings deploy model parameters."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "QianfanEmbeddingDeployModelParameters",
"description": "Qianfan Embeddings deploy model parameters.",
"documentationUrl": "",
"parameters": [
{
"name": "name",
"type": "string",
"required": true,
"description": "The name of the model."
},
{
"name": "provider",
"type": "string",
"required": false,
"description": "The provider of the model. If model is deployed in local, this is the inference type. If model is deployed in third-party service, this is platform name('proxy/<platform>')",
"defaultValue": "proxy/qianfan"
},
{
"name": "verbose",
"type": "boolean",
"required": false,
"description": "Show verbose output.",
"defaultValue": "False"
},
{
"name": "concurrency",
"type": "integer",
"required": false,
"description": "Model concurrency limit",
"defaultValue": "100"
},
{
"name": "api_key",
"type": "string",
"required": false,
"description": "The API key for the embeddings API."
},
{
"name": "api_secret",
"type": "string",
"required": false,
"description": "The Secret key for the embeddings API. It's the sk for qianfan."
},
{
"name": "backend",
"type": "string",
"required": false,
"description": "The real model name to pass to the provider, default is None. If backend is None, use name as the real model name."
}
]
}} />
---
title: "TongyiEmbeddingDeployModelParameters Configuration"
description: "Tongyi Embeddings deploy model parameters."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "TongyiEmbeddingDeployModelParameters",
"description": "Tongyi Embeddings deploy model parameters.",
"documentationUrl": "",
"parameters": [
{
"name": "name",
"type": "string",
"required": true,
"description": "The name of the model."
},
{
"name": "provider",
"type": "string",
"required": false,
"description": "The provider of the model. If model is deployed in local, this is the inference type. If model is deployed in third-party service, this is platform name('proxy/<platform>')",
"defaultValue": "proxy/tongyi"
},
{
"name": "verbose",
"type": "boolean",
"required": false,
"description": "Show verbose output.",
"defaultValue": "False"
},
{
"name": "concurrency",
"type": "integer",
"required": false,
"description": "Model concurrency limit",
"defaultValue": "100"
},
{
"name": "api_key",
"type": "string",
"required": false,
"description": "The API key for the embeddings API."
},
{
"name": "backend",
"type": "string",
"required": false,
"description": "The real model name to pass to the provider, default is None. If backend is None, use name as the real model name.",
"defaultValue": "text-embedding-v1"
}
]
}} />
---
title: "graph_store"
description: "graph_store Configuration"
---
# graph_store Configuration
This document provides an overview of all configuration classes in graph_store type.
import { ConfigClassTable } from '@site/src/components/mdx/ConfigClassTable';
## Configuration Classes
<ConfigClassTable classes={[
{
"name": "BuiltinKnowledgeGraphConfig",
"description": "",
"link": "./knowledge_graph_builtinknowledgegraphconfig_f26e05"
},
{
"name": "Neo4jStoreConfig",
"description": "",
"link": "./neo4j_store_neo4jstoreconfig_a4db5d"
},
{
"name": "OpenSPGConfig",
"description": "",
"link": "./open_spg_openspgconfig_a744fd"
},
{
"name": "TuGraphStoreConfig",
"description": "TuGraph store config.",
"link": "./tugraph_store_tugraphstoreconfig_7ca8a8"
},
]} />
---
title: "TuGraphStoreConfig Configuration"
description: "TuGraph store config."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "TuGraphStoreConfig",
"description": "TuGraph store config.",
"documentationUrl": "",
"parameters": [
{
"name": "host",
"type": "string",
"required": false,
"description": "",
"defaultValue": "127.0.0.1"
},
{
"name": "port",
"type": "integer",
"required": false,
"description": "",
"defaultValue": "7687"
},
{
"name": "username",
"type": "string",
"required": false,
"description": "",
"defaultValue": "admin"
},
{
"name": "password",
"type": "string",
"required": false,
"description": "",
"defaultValue": "73@TuGraph"
},
{
"name": "vertex_type",
"type": "string",
"required": false,
"description": "",
"defaultValue": "entity"
},
{
"name": "document_type",
"type": "string",
"required": false,
"description": "",
"defaultValue": "document"
},
{
"name": "chunk_type",
"type": "string",
"required": false,
"description": "",
"defaultValue": "chunk"
},
{
"name": "edge_type",
"type": "string",
"required": false,
"description": "",
"defaultValue": "relation"
},
{
"name": "include_type",
"type": "string",
"required": false,
"description": "",
"defaultValue": "include"
},
{
"name": "next_type",
"type": "string",
"required": false,
"description": "",
"defaultValue": "next"
},
{
"name": "plugin_names",
"type": "string",
"required": false,
"description": "",
"defaultValue": "['leiden']"
},
{
"name": "enable_summary",
"type": "boolean",
"required": false,
"description": "",
"defaultValue": "True"
},
{
"name": "enable_similarity_search",
"type": "boolean",
"required": false,
"description": "",
"defaultValue": "False"
}
]
}} />
---
title: "Baichuan Proxy LLM Configuration"
description: "Baichuan Proxy LLM"
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "BaichuanDeployModelParameters",
"description": "Baichuan Proxy LLM",
"documentationUrl": "https://platform.baichuan-ai.com/docs/api",
"parameters": [
{
"name": "name",
"type": "string",
"required": true,
"description": "The name of the model."
},
{
"name": "backend",
"type": "string",
"required": false,
"description": "The real model name to pass to the provider, default is None. If backend is None, use name as the real model name."
},
{
"name": "provider",
"type": "string",
"required": false,
"description": "The provider of the model. If model is deployed in local, this is the inference type. If model is deployed in third-party service, this is platform name('proxy/<platform>')",
"defaultValue": "proxy/baichuan"
},
{
"name": "verbose",
"type": "boolean",
"required": false,
"description": "Show verbose output.",
"defaultValue": "False"
},
{
"name": "concurrency",
"type": "integer",
"required": false,
"description": "Model concurrency limit",
"defaultValue": "100"
},
{
"name": "prompt_template",
"type": "string",
"required": false,
"description": "Prompt template. If None, the prompt template is automatically determined from model. Just for local deployment."
},
{
"name": "context_length",
"type": "integer",
"required": false,
"description": "The context length of the OpenAI API. If None, it is determined by the model."
},
{
"name": "reasoning_model",
"type": "boolean",
"required": false,
"description": "Whether the model is a reasoning model. If None, it is automatically determined from model."
},
{
"name": "api_base",
"type": "string",
"required": false,
"description": "The base url of the Baichuan API.",
"defaultValue": "${env:BAICHUAN_API_BASE:-https://api.baichuan-ai.com/v1}"
},
{
"name": "api_key",
"type": "string",
"required": false,
"description": "The API key of the Baichuan API.",
"defaultValue": "${env:BAICHUAN_API_KEY}"
},
{
"name": "api_type",
"type": "string",
"required": false,
"description": "The type of the OpenAI API, if you use Azure, it can be: azure"
},
{
"name": "api_version",
"type": "string",
"required": false,
"description": "The version of the OpenAI API."
},
{
"name": "http_proxy",
"type": "string",
"required": false,
"description": "The http or https proxy to use openai"
}
]
}} />
---
title: "OpenAI Compatible Proxy LLM Configuration"
description: "OpenAI Compatible Proxy LLM"
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "OpenAICompatibleDeployModelParameters",
"description": "OpenAI Compatible Proxy LLM",
"documentationUrl": "https://platform.openai.com/docs/api-reference/chat",
"parameters": [
{
"name": "name",
"type": "string",
"required": true,
"description": "The name of the model."
},
{
"name": "backend",
"type": "string",
"required": false,
"description": "The real model name to pass to the provider, default is None. If backend is None, use name as the real model name."
},
{
"name": "provider",
"type": "string",
"required": false,
"description": "The provider of the model. If model is deployed in local, this is the inference type. If model is deployed in third-party service, this is platform name('proxy/<platform>')",
"defaultValue": "proxy/openai"
},
{
"name": "verbose",
"type": "boolean",
"required": false,
"description": "Show verbose output.",
"defaultValue": "False"
},
{
"name": "concurrency",
"type": "integer",
"required": false,
"description": "Model concurrency limit",
"defaultValue": "100"
},
{
"name": "prompt_template",
"type": "string",
"required": false,
"description": "Prompt template. If None, the prompt template is automatically determined from model. Just for local deployment."
},
{
"name": "context_length",
"type": "integer",
"required": false,
"description": "The context length of the OpenAI API. If None, it is determined by the model."
},
{
"name": "reasoning_model",
"type": "boolean",
"required": false,
"description": "Whether the model is a reasoning model. If None, it is automatically determined from model."
},
{
"name": "api_base",
"type": "string",
"required": false,
"description": "The base url of the OpenAI API.",
"defaultValue": "${env:OPENAI_API_BASE:-https://api.openai.com/v1}"
},
{
"name": "api_key",
"type": "string",
"required": false,
"description": "The API key of the OpenAI API.",
"defaultValue": "${env:OPENAI_API_KEY}"
},
{
"name": "api_type",
"type": "string",
"required": false,
"description": "The type of the OpenAI API, if you use Azure, it can be: azure"
},
{
"name": "api_version",
"type": "string",
"required": false,
"description": "The version of the OpenAI API."
},
{
"name": "http_proxy",
"type": "string",
"required": false,
"description": "The http or https proxy to use openai"
}
]
}} />
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment