"description": "The real model name to pass to the provider, default is None. If backend is None, use name as the real model name."
},
{
"name": "provider",
"type": "string",
"required": false,
"description": "The provider of the model. If model is deployed in local, this is the inference type. If model is deployed in third-party service, this is platform name('proxy/<platform>')",
"defaultValue": "proxy/zhipu"
},
{
"name": "verbose",
"type": "boolean",
"required": false,
"description": "Show verbose output.",
"defaultValue": "False"
},
{
"name": "concurrency",
"type": "integer",
"required": false,
"description": "Model concurrency limit",
"defaultValue": "100"
},
{
"name": "prompt_template",
"type": "string",
"required": false,
"description": "Prompt template. If None, the prompt template is automatically determined from model. Just for local deployment."
},
{
"name": "context_length",
"type": "integer",
"required": false,
"description": "The context length of the OpenAI API. If None, it is determined by the model."
},
{
"name": "reasoning_model",
"type": "boolean",
"required": false,
"description": "Whether the model is a reasoning model. If None, it is automatically determined from model."
You can define a hook configuration with a path and optional parameters.
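Taken together, these parameters map onto a model entry in a TOML deployment file. A minimal sketch, assuming the `[[models.llms]]` table used by DB-GPT's example configs; the model names are placeholders:

```toml
# A minimal sketch of a Zhipu proxy deployment using the parameters above.
# The [[models.llms]] table name follows DB-GPT's example TOML configs;
# "glm-4" and the backend value are placeholder names.
[[models.llms]]
name = "glm-4"
provider = "proxy/zhipu"  # third-party service: 'proxy/<platform>'
backend = "glm-4-plus"    # optional real model name passed to the provider
verbose = false           # show verbose output
concurrency = 100         # model concurrency limit
```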
---
description: "Hook configuration. You can define a hook configuration with a path and optional parameters. It will be used to dynamically load and execute a hook function or a callable object."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "HookConfig",
"description": "Hook configuration.\n\n You can define a hook configuration with a path and optional parameters.\n It will be used to dynamically load and execute a hook function or a callable\n object.",
"documentationUrl": "",
"parameters": [
{
"name": "path",
"type": "string",
"required": true,
"description": "Hook path, it can be a class path or a function path. eg: 'dbgpt.config.hooks.env_var_hook'"
},
{
"name": "init_params",
"type": "object",
"required": false,
"description": "Hook init params to pass to the hook constructor(Just for class hook), must be key-value pairs",
"defaultValue": "{}"
},
{
"name": "params",
"type": "object",
"required": false,
"description": "Hook params to pass to the hook, must be key-value pairs",
"defaultValue": "{}"
},
{
"name": "enabled",
"type": "boolean",
"required": false,
"description": "Whether the hook is enabled, default is True",
"description": "The path of the model, if you want to deploy a local model."
},
{
"name": "device",
"type": "string",
"required": false,
"description": "Device to run model. If None, the device is automatically determined"
},
{
"name": "provider",
"type": "string",
"required": false,
"description": "The provider of the model. If model is deployed in local, this is the inference type. If model is deployed in third-party service, this is platform name('proxy/<platform>')",
"defaultValue": "hf"
},
{
"name": "verbose",
"type": "boolean",
"required": false,
"description": "Show verbose output.",
"defaultValue": "False"
},
{
"name": "concurrency",
"type": "integer",
"required": false,
"description": "Model concurrency limit",
"defaultValue": "50"
},
{
"name": "max_length",
"type": "integer",
"required": false,
"description": "Max length for input sequences. Longer sequences will be truncated."
},
{
"name": "model_kwargs",
"type": "object",
"required": false,
"description": "Keyword arguments to pass to the model.",
---
description: "OpenAPI Reranker Deploy Model Parameters."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "OpenAPIRerankerDeployModelParameters",
"description": "OpenAPI Reranker Deploy Model Parameters.",
"documentationUrl": "",
"parameters": [
{
"name": "name",
"type": "string",
"required": true,
"description": "The name of the model."
},
{
"name": "provider",
"type": "string",
"required": false,
"description": "The provider of the model. If model is deployed in local, this is the inference type. If model is deployed in third-party service, this is platform name('proxy/<platform>')",
"description": "The provider of the model. If model is deployed in local, this is the inference type. If model is deployed in third-party service, this is platform name('proxy/<platform>')",
---
description: "This configuration is for the model serve module."
---
import { ConfigDetail } from "@site/src/components/mdx/ConfigDetail";
<ConfigDetail config={{
"name": "ServeConfig",
"description": "This configuration is for the model serve module.",
"documentationUrl": null,
"parameters": [
{
"name": "api_keys",
"type": "string",
"required": false,
"description": "API keys for the endpoint, if None, allow all"
},
{
"name": "model_storage",
"type": "string",
"required": false,
"description": "The storage type of model configures, if None, use the default storage(current database). When you run in light mode, it will not use any storage.",