
fix: httpx socks package missing (#1977)

tags/0.4.5
takatost, 1 year ago
Commit 33901384c6

api/core/model_runtime/docs/zh_Hans/customizable_model_scale_out.md (+1 / -1)

 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_type=model_type,
 model_properties={
-    'mode': ModelType.LLM,
+    ModelPropertyKey.MODE: ModelType.LLM,
 },
 parameter_rules=rules
 )
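The pattern repeated across the files below is the same one shown here: the keys of model_properties change from bare string literals such as 'mode' and 'context_size' to members of the ModelPropertyKey enum, so every provider spells the property names the same way the runtime does. A minimal before/after sketch, assuming it runs inside Dify's api package (the import path and the ModelPropertyKey/ModelType names are the ones visible in this diff; the variable names are illustrative):

from core.model_runtime.entities.model_entities import ModelPropertyKey, ModelType

# Old style from the left-hand side of the hunk: a plain string key.
legacy_properties = {
    'mode': ModelType.LLM,
}

# New style introduced by this commit: the enum member itself is the key.
updated_properties = {
    ModelPropertyKey.MODE: ModelType.LLM,
}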

api/core/model_runtime/model_providers/azure_openai/_constant.py (+16 / -16)

 ],
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.CHAT.value,
-    'context_size': 4096,
+    ModelPropertyKey.MODE: LLMMode.CHAT.value,
+    ModelPropertyKey.CONTEXT_SIZE: 4096,
 },
 parameter_rules=[
     ParameterRule(

 ],
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.CHAT.value,
-    'context_size': 16385,
+    ModelPropertyKey.MODE: LLMMode.CHAT.value,
+    ModelPropertyKey.CONTEXT_SIZE: 16385,
 },
 parameter_rules=[
     ParameterRule(

 ],
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.CHAT.value,
-    'context_size': 8192,
+    ModelPropertyKey.MODE: LLMMode.CHAT.value,
+    ModelPropertyKey.CONTEXT_SIZE: 8192,
 },
 parameter_rules=[
     ParameterRule(

 ],
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.CHAT.value,
-    'context_size': 32768,
+    ModelPropertyKey.MODE: LLMMode.CHAT.value,
+    ModelPropertyKey.CONTEXT_SIZE: 32768,
 },
 parameter_rules=[
     ParameterRule(

 ],
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.CHAT.value,
-    'context_size': 128000,
+    ModelPropertyKey.MODE: LLMMode.CHAT.value,
+    ModelPropertyKey.CONTEXT_SIZE: 128000,
 },
 parameter_rules=[
     ParameterRule(

 ],
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.CHAT.value,
-    'context_size': 128000,
+    ModelPropertyKey.MODE: LLMMode.CHAT.value,
+    ModelPropertyKey.CONTEXT_SIZE: 128000,
 },
 parameter_rules=[
     ParameterRule(

 model_type=ModelType.LLM,
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.COMPLETION.value,
-    'context_size': 4096,
+    ModelPropertyKey.MODE: LLMMode.COMPLETION.value,
+    ModelPropertyKey.CONTEXT_SIZE: 4096,
 },
 parameter_rules=[
     ParameterRule(

 model_type=ModelType.LLM,
 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_properties={
-    'mode': LLMMode.COMPLETION.value,
-    'context_size': 4096,
+    ModelPropertyKey.MODE: LLMMode.COMPLETION.value,
+    ModelPropertyKey.CONTEXT_SIZE: 4096,
 },
 parameter_rules=[
     ParameterRule(
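Every Azure base-model entry in _constant.py carries the same two properties, now under enum keys: the conversation mode and the context window. A small sketch of the two variants seen in the hunks above, assuming LLMMode is importable from core.model_runtime.entities.llm_entities (that module path is an assumption; the keys and values are taken from the diff):

from core.model_runtime.entities.llm_entities import LLMMode  # assumed module path
from core.model_runtime.entities.model_entities import ModelPropertyKey

# Chat-style base model with a 4,096-token window (first hunk above).
chat_properties = {
    ModelPropertyKey.MODE: LLMMode.CHAT.value,
    ModelPropertyKey.CONTEXT_SIZE: 4096,
}

# Completion-style base model (last two hunks above).
completion_properties = {
    ModelPropertyKey.MODE: LLMMode.COMPLETION.value,
    ModelPropertyKey.CONTEXT_SIZE: 4096,
}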

api/core/model_runtime/model_providers/huggingface_hub/llm/llm.py (+2 / -2)

 from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool, AssistantPromptMessage, \
     UserPromptMessage, SystemPromptMessage
 from core.model_runtime.entities.model_entities import ParameterRule, DefaultParameterName, AIModelEntity, ModelType, \
-    FetchFrom
+    FetchFrom, ModelPropertyKey
 from core.model_runtime.errors.validate import CredentialsValidateFailedError
 from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
 from core.model_runtime.model_providers.huggingface_hub._common import _CommonHuggingfaceHub

 fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
 model_type=ModelType.LLM,
 model_properties={
-    'mode': LLMMode.COMPLETION.value
+    ModelPropertyKey.MODE: LLMMode.COMPLETION.value
 },
 parameter_rules=self._get_customizable_model_parameter_rules()
 )

api/requirements.txt (+2 / -2)

 cohere~=4.32
 pyyaml~=6.0.1
 numpy~=1.25.2
-unstructured~=0.10.27
 unstructured[docx,pptx,msg,md,ppt]~=0.10.27
 bs4~=0.0.1
 markdown~=3.5.1
-google-generativeai~=0.3.2
+google-generativeai~=0.3.2
+httpx[socks]~=0.24.1
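The dependency that gives the commit its title: httpx only handles socks5:// proxies when the optional SOCKS backend (the socksio package, pulled in by the [socks] extra) is installed; without it, httpx fails with an error about the missing socksio package. A minimal sketch of the call the extra unblocks, with a made-up local proxy address (the proxy URL and target URL are assumptions for illustration):

import httpx

# Works only with httpx[socks] installed; plain httpx rejects socks5:// proxies
# because the optional socksio backend is missing.
with httpx.Client(proxies="socks5://127.0.0.1:1080", timeout=10.0) as client:
    resp = client.get("https://example.com")
    print(resp.status_code)

Reinstalling from the updated file (pip install -r api/requirements.txt) picks up the extra and the socksio backend it resolves to.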
