{
  "models": {
    "default": "bailian/qwen3.5-plus",
    "byTask": {
      "chat": "bailian/qwen3.5-plus",
      "code": "bailian/qwen-coder",
      "analysis": "bailian/qwen-max",
      "translation": "bailian/qwen-turbo"
    },
    "byChannel": {
      "feishu": "bailian/qwen3.5-plus",
      "discord": "bailian/qwen-plus",
      "web": "bailian/qwen3.5-plus"
    },
    "providers": {
      "bailian": {
        "baseUrl": "https://coding.dashscope.aliyuncs.com/v1",
        "apiKey": "sk-xxxxxxxxxxxxxxxx",
        "models": {
          "qwen3.5-plus": {
            "contextWindow": 32768,
            "maxTokens": 8192,
            "temperature": 0.7,
            "topP": 0.9
          },
          "qwen-coder": {
            "contextWindow": 32768,
            "maxTokens": 8192,
            "temperature": 0.3,
            "topP": 0.95
          },
          "qwen-max": {
            "contextWindow": 32768,
            "maxTokens": 8192,
            "temperature": 0.5,
            "topP": 0.9
          },
          "qwen-turbo": {
            "contextWindow": 8192,
            "maxTokens": 2048,
            "temperature": 0.7,
            "topP": 0.9
          }
        }
      },
      "openai": {
        "baseUrl": "https://api.openai.com/v1",
        "apiKey": "sk-xxxxxxxxxxxxxxxx",
        "models": {
          "gpt-4": {
            "contextWindow": 8192,
            "maxTokens": 4096,
            "temperature": 0.7
          },
          "gpt-3.5-turbo": {
            "contextWindow": 4096,
            "maxTokens": 4096,
            "temperature": 0.7
          }
        }
      },
      "local": {
        "baseUrl": "http://localhost:11434/v1",
        "models": {
          "llama2": {
            "contextWindow": 4096,
            "maxTokens": 2048,
            "temperature": 0.7
          }
        }
      }
    }
  }
}