@@ -10,8 +10,14 @@ model_properties:
 parameter_rules:
   - name: temperature
     use_template: temperature
+    min: 0.01
+    max: 1
+    default: 0.9
   - name: top_p
     use_template: top_p
+    min: 0.01
+    max: 1
+    default: 0.95
   - name: max_tokens
     use_template: max_tokens
     required: true
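
The min/max/default triples above bound what callers may pass for each sampling parameter. As a minimal sketch of how such a rule could be enforced, assuming a simple default-then-clamp policy (`apply_rule` and the rule dict are illustrative helpers, not the model runtime's actual validation code):

```python
from typing import Optional

# Illustrative only: apply a parameter rule by filling the default and
# clamping to [min, max]. Mirrors the YAML shape above; `apply_rule` is a
# hypothetical helper, not part of the provider code.
def apply_rule(rule: dict, value: Optional[float]) -> float:
    if value is None:
        return rule["default"]
    return min(max(value, rule["min"]), rule["max"])

temperature_rule = {"min": 0.01, "max": 1, "default": 0.9}
print(apply_rule(temperature_rule, None))  # 0.9  (default applied)
print(apply_rule(temperature_rule, 0.0))   # 0.01 (clamped to min)
```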
@@ -0,0 +1,35 @@
+model: abab5.5s-chat
+label:
+  en_US: Abab5.5s-Chat
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 8192
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0.01
+    max: 1
+    default: 0.9
+  - name: top_p
+    use_template: top_p
+    min: 0.01
+    max: 1
+    default: 0.95
+  - name: max_tokens
+    use_template: max_tokens
+    required: true
+    default: 3072
+    min: 1
+    max: 8192
+  - name: presence_penalty
+    use_template: presence_penalty
+  - name: frequency_penalty
+    use_template: frequency_penalty
+pricing:
+  input: '0.00'
+  output: '0.005'
+  unit: '0.001'
+  currency: RMB
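
The `unit: '0.001'` field scales the quoted prices, which makes the input/output figures effectively per-1,000-token prices, assuming the runtime charges tokens × unit × price. A rough cost estimate under that assumption (`estimate_cost` is a hypothetical helper, not the runtime's pricing API):

```python
from decimal import Decimal

# Illustrative only: estimate cost as tokens * unit * price, which reads the
# YAML prices above as "per 1K tokens" when unit is '0.001'.
def estimate_cost(tokens: int, price: str, unit: str = "0.001") -> Decimal:
    return Decimal(tokens) * Decimal(unit) * Decimal(price)

# abab5.5s-chat output price from the YAML above: '0.005' RMB.
print(estimate_cost(2048, "0.005"))  # 0.01024 RMB for 2,048 output tokens
```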
@@ -22,7 +22,7 @@ class MinimaxChatCompletionPro(object):
         """
         generate chat completion
         """
-        if model != 'abab5.5-chat':
+        if model not in ['abab5.5-chat', 'abab5.5s-chat']:
             raise BadRequestError(f'Invalid model: {model}')
         if not api_key or not group_id:
@@ -18,6 +18,7 @@ from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage
 class MinimaxLargeLanguageModel(LargeLanguageModel):
     model_apis = {
+        'abab5.5s-chat': MinimaxChatCompletionPro,
         'abab5.5-chat': MinimaxChatCompletionPro,
         'abab5-chat': MinimaxChatCompletion
     }
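
The `model_apis` mapping is what routes each model name to a completion client, so `abab5.5s-chat` now shares the Pro client with `abab5.5-chat`. A simplified sketch of this dict-based dispatch follows (the classes and `invoke` below are stand-ins, not the provider's real interfaces); deriving the allowed-model check from the same mapping, rather than from the hard-coded list inside `MinimaxChatCompletionPro`, would also keep the two from drifting apart:

```python
# Stand-in classes: the real clients live in the minimax provider package.
class ChatCompletion:
    def generate(self, model: str, prompt: str) -> str:
        return f"[chatcompletion endpoint] {model}: {prompt}"

class ChatCompletionPro(ChatCompletion):
    def generate(self, model: str, prompt: str) -> str:
        return f"[chatcompletion_pro endpoint] {model}: {prompt}"

MODEL_APIS = {
    'abab5.5s-chat': ChatCompletionPro,
    'abab5.5-chat': ChatCompletionPro,
    'abab5-chat': ChatCompletion,
}

def invoke(model: str, prompt: str) -> str:
    # Deriving the validity check from the mapping keeps it in sync with dispatch.
    if model not in MODEL_APIS:
        raise ValueError(f'Invalid model: {model}')
    return MODEL_APIS[model]().generate(model, prompt)

print(invoke('abab5.5s-chat', 'hello'))  # routed to the Pro client
```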