
fix: azure openai model parameters wrong when using hosting credentials (#1993)

tags/0.4.5
takatost 1 year ago
commit bf7045566d

api/core/entities/provider_configuration.py (+16, -1)

         :return:
         """
         if self.using_provider_type == ProviderType.SYSTEM:
-            return self.system_configuration.credentials
+            restrict_models = []
+            for quota_configuration in self.system_configuration.quota_configurations:
+                if self.system_configuration.current_quota_type != quota_configuration.quota_type:
+                    continue
+
+                restrict_models = quota_configuration.restrict_models
+
+            copy_credentials = self.system_configuration.credentials.copy()
+            if restrict_models:
+                for restrict_model in restrict_models:
+                    if (restrict_model.model_type == model_type
+                            and restrict_model.model == model
+                            and restrict_model.base_model_name):
+                        copy_credentials['base_model_name'] = restrict_model.base_model_name
+
+            return copy_credentials
         else:
             if self.custom_configuration.models:
                 for model_configuration in self.custom_configuration.models:
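For context, the changed branch no longer returns the hosted (SYSTEM) credentials dict directly: it copies it and, for the restricted model matching the requested model_type and model, pins base_model_name before returning. A minimal runnable sketch of that override, using simplified stand-in dataclasses rather than Dify's actual provider entities (the names below are illustrative assumptions, not the repository's classes):

# Simplified stand-ins for the provider entities touched by this diff (illustrative only).
from dataclasses import dataclass, field

@dataclass
class RestrictModel:
    model: str
    model_type: str
    base_model_name: str = ''

@dataclass
class SystemConfiguration:
    credentials: dict
    restrict_models: list = field(default_factory=list)

def current_system_credentials(config: SystemConfiguration, model_type: str, model: str) -> dict:
    # Copy the hosted credentials so the shared dict is never mutated in place,
    # then override base_model_name for the matching restricted model.
    copy_credentials = config.credentials.copy()
    for restrict_model in config.restrict_models:
        if (restrict_model.model_type == model_type
                and restrict_model.model == model
                and restrict_model.base_model_name):
            copy_credentials['base_model_name'] = restrict_model.base_model_name
    return copy_credentials

config = SystemConfiguration(
    credentials={'openai_api_base': 'https://example.openai.azure.com', 'openai_api_key': '***'},
    restrict_models=[RestrictModel(model='gpt-4', model_type='llm', base_model_name='gpt-4')],
)
print(current_system_credentials(config, 'llm', 'gpt-4'))  # base_model_name now set to 'gpt-4'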

api/core/model_runtime/model_providers/azure_openai/_constant.py (+2, -2)

                 name='frequency_penalty',
                 **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY],
             ),
-            _get_max_tokens(default=512, min_val=1, max_val=128000),
+            _get_max_tokens(default=512, min_val=1, max_val=4096),
             ParameterRule(
                 name='seed',
                 label=I18nObject(

                 name='frequency_penalty',
                 **PARAMETER_RULE_TEMPLATE[DefaultParameterName.FREQUENCY_PENALTY],
             ),
-            _get_max_tokens(default=512, min_val=1, max_val=128000),
+            _get_max_tokens(default=512, min_val=1, max_val=4096),
             ParameterRule(
                 name='seed',
                 label=I18nObject(

api/core/model_runtime/model_providers/openai/llm/gpt-4-1106-preview.yaml (+1, -1)

     use_template: max_tokens
     default: 512
     min: 1
-    max: 128000
+    max: 4096
   - name: seed
     label:
       zh_Hans: 种子

api/core/model_runtime/model_providers/openai/llm/gpt-4-vision-preview.yaml (+1, -1)

     use_template: max_tokens
     default: 512
     min: 1
-    max: 128000
+    max: 4096
   - name: seed
     label:
       zh_Hans: 种子
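Both YAML rules, like the _constant.py change above, cap the max_tokens parameter at 4,096: gpt-4-1106-preview and gpt-4-vision-preview read a 128,000-token context but return at most 4,096 completion tokens. A small hypothetical helper (not part of this commit) showing how a request could be clamped to that per-model completion cap:

# Hypothetical helper, not part of this commit: clamp a requested max_tokens value
# to the completion cap these parameter rules describe (default 512, min 1, max 4096).
COMPLETION_CAPS = {
    'gpt-4-1106-preview': 4096,
    'gpt-4-vision-preview': 4096,
}

def clamp_max_tokens(model: str, requested: int, default: int = 512) -> int:
    cap = COMPLETION_CAPS.get(model, default)
    return max(1, min(requested, cap))

print(clamp_max_tokens('gpt-4-1106-preview', 128000))  # -> 4096, not the context-window size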
