from werkzeug.exceptions import HTTPException


class FilenameNotExistsError(HTTPException):
    code = 400
    description = "The specified filename does not exist."

import mimetypes
import os
import re
import urllib.parse
from uuid import uuid4

import httpx
from pydantic import BaseModel


class FileInfo(BaseModel):
    filename: str
    extension: str
    mimetype: str
    size: int


def guess_file_info_from_response(response: httpx.Response):
    url = str(response.url)

    # Try to extract filename from URL
    parsed_url = urllib.parse.urlparse(url)
    url_path = parsed_url.path
    filename = os.path.basename(url_path)

    # If filename couldn't be extracted, use Content-Disposition header
    if not filename:
        content_disposition = response.headers.get("Content-Disposition")
        if content_disposition:
            filename_match = re.search(r'filename="?(.+)"?', content_disposition)
            if filename_match:
                filename = filename_match.group(1)

    # If still no filename, generate a unique one
    if not filename:
        unique_name = str(uuid4())
        filename = f"{unique_name}"

    # Guess MIME type from filename first, then URL
    mimetype, _ = mimetypes.guess_type(filename)
    if mimetype is None:
        mimetype, _ = mimetypes.guess_type(url)
    if mimetype is None:
        # If guessing fails, use Content-Type from response headers
        mimetype = response.headers.get("Content-Type", "application/octet-stream")

    extension = os.path.splitext(filename)[1]

    # Ensure filename has an extension
    if not extension:
        extension = mimetypes.guess_extension(mimetype) or ".bin"
        filename = f"{filename}{extension}"

    return FileInfo(
        filename=filename,
        extension=extension,
        mimetype=mimetype,
        size=int(response.headers.get("Content-Length", -1)),
    )
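
# Usage sketch (not part of the original module): fetch a URL with httpx and
# let guess_file_info_from_response derive the filename, extension, MIME type,
# and size. The URL below is a placeholder.
if __name__ == "__main__":
    sample_response = httpx.get("https://example.com/report.pdf")
    info = guess_file_info_from_response(sample_response)
    print(info.filename, info.extension, info.mimetype, info.size)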

from libs.external_api import ExternalApi

from .files import FileApi, FilePreviewApi, FileSupportTypeApi
from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi

bp = Blueprint("console", __name__, url_prefix="/console/api")
api = ExternalApi(bp)

# File
api.add_resource(FileApi, "/files/upload")
api.add_resource(FilePreviewApi, "/files/<uuid:file_id>/preview")
api.add_resource(FileSupportTypeApi, "/files/support-type")

# Remote files
api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>")
api.add_resource(RemoteFileUploadApi, "/remote-files/upload")

# Import other controllers
from . import admin, apikey, extension, feature, ping, setup, version

    datasets_document,
    datasets_segments,
    external,
    file,
    hit_testing,
    website,
)

from models.model import ApiToken, App

from . import api
from .setup import setup_required
from .wraps import account_initialization_required
from .wraps import account_initialization_required, setup_required

api_key_fields = {
    "id": fields.String,

from flask_restful import Resource, reqparse

from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
from services.advanced_prompt_template_service import AdvancedPromptTemplateService

from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from libs.helper import uuid_value
from libs.login import login_required
from models.model import AppMode

from controllers.console import api
from controllers.console.app.error import NoFileUploadedError
from controllers.console.datasets.error import TooManyFilesError
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
from controllers.console.wraps import (
    account_initialization_required,
    cloud_edition_billing_resource_check,
    setup_required,
)
from extensions.ext_redis import redis_client
from fields.annotation_fields import (
    annotation_fields,

from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
from controllers.console.wraps import (
    account_initialization_required,
    cloud_edition_billing_resource_check,
    setup_required,
)
from core.ops.ops_trace_manager import OpsTraceManager
from fields.app_fields import (
    app_detail_fields,

    UnsupportedAudioTypeError,
)
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from libs.login import login_required

    ProviderQuotaExceededError,
)
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom

from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from core.app.entities.app_invoke_entities import InvokeFrom
from extensions.ext_database import db
from fields.conversation_fields import (

from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from fields.conversation_variable_fields import paginated_conversation_variable_fields
from libs.login import login_required

    ProviderNotInitializeError,
    ProviderQuotaExceededError,
)
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.llm_generator.llm_generator import LLMGenerator
from core.model_runtime.errors.invoke import InvokeError

)
from controllers.console.app.wraps import get_app_model
from controllers.console.explore.error import AppSuggestedQuestionsAfterAnswerDisabledError
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
from controllers.console.wraps import (
    account_initialization_required,
    cloud_edition_billing_resource_check,
    setup_required,
)
from core.app.entities.app_invoke_entities import InvokeFrom
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError

from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from core.agent.entities import AgentToolEntity
from core.tools.tool_manager import ToolManager
from core.tools.utils.configuration import ToolParameterConfigurationManager

from controllers.console import api
from controllers.console.app.error import TracingConfigCheckError, TracingConfigIsExist, TracingConfigNotExist
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
from services.ops_service import OpsService

from constants.languages import supported_language
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from fields.app_fields import app_site_fields
from libs.login import login_required

from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from libs.helper import DatetimeString
from libs.login import login_required

from controllers.console import api
from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
from factories import variable_factory

from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from fields.workflow_app_log_fields import workflow_app_log_pagination_fields
from libs.login import login_required
from models import App

from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from fields.workflow_run_fields import (
    advanced_chat_workflow_run_pagination_fields,
    workflow_run_detail_fields,

from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from libs.helper import DatetimeString
from libs.login import login_required

from libs.login import login_required
from services.auth.api_key_auth_service import ApiKeyAuthService

from ..setup import setup_required
from ..wraps import account_initialization_required
from ..wraps import account_initialization_required, setup_required


class ApiKeyAuthDataSource(Resource):

from libs.login import login_required
from libs.oauth_data_source import NotionOAuth

from ..setup import setup_required
from ..wraps import account_initialization_required
from ..wraps import account_initialization_required, setup_required


def get_oauth_providers():

    PasswordMismatchError,
)
from controllers.console.error import EmailSendIpLimitError, NotAllowedRegister
from controllers.console.setup import setup_required
from controllers.console.wraps import setup_required
from events.tenant_event import tenant_was_created
from extensions.ext_database import db
from libs.helper import email, extract_remote_ip

    NotAllowedCreateWorkspace,
    NotAllowedRegister,
)
from controllers.console.setup import setup_required
from controllers.console.wraps import setup_required
from events.tenant_event import tenant_was_created
from libs.helper import email, extract_remote_ip
from libs.password import valid_password

from flask_restful import Resource, reqparse

from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required, only_edition_cloud
from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required
from libs.login import login_required
from services.billing_service import BillingService

from werkzeug.exceptions import NotFound

from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from core.indexing_runner import IndexingRunner
from core.rag.extractor.entity.extract_setting import ExtractSetting
from core.rag.extractor.notion_extractor import NotionExtractor

from controllers.console.apikey import api_key_fields, api_key_list
from controllers.console.app.error import ProviderNotInitializeError
from controllers.console.datasets.error import DatasetInUseError, DatasetNameDuplicateError, IndexingEstimateError
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
from core.indexing_runner import IndexingRunner
from core.model_runtime.entities.model_entities import ModelType

    InvalidActionError,
    InvalidMetadataError,
)
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
from controllers.console.wraps import (
    account_initialization_required,
    cloud_edition_billing_resource_check,
    setup_required,
)
from core.errors.error import (
    LLMBadRequestError,
    ModelCurrentlyNotSupportError,

from controllers.console import api
from controllers.console.app.error import ProviderNotInitializeError
from controllers.console.datasets.error import InvalidActionError, NoFileUploadedError, TooManyFilesError
from controllers.console.setup import setup_required
from controllers.console.wraps import (
    account_initialization_required,
    cloud_edition_billing_knowledge_limit_check,
    cloud_edition_billing_resource_check,
    setup_required,
)
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
from core.model_manager import ModelManager

import services
from controllers.console import api
from controllers.console.datasets.error import DatasetNameDuplicateError
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from fields.dataset_fields import dataset_detail_fields
from libs.login import login_required
from services.dataset_service import DatasetService

from controllers.console import api
from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required

from controllers.console import api
from controllers.console.datasets.error import WebsiteCrawlError
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
from services.website_service import WebsiteService

from constants import HIDDEN_VALUE
from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from fields.api_based_extension_fields import api_based_extension_fields
from libs.login import login_required
from models.api_based_extension import APIBasedExtension

from services.feature_service import FeatureService

from . import api
from .setup import setup_required
from .wraps import account_initialization_required, cloud_utm_record
from .wraps import account_initialization_required, cloud_utm_record, setup_required


class FeatureApi(Resource):

import urllib.parse

from flask import request
from flask_login import current_user
from flask_restful import Resource, marshal_with, reqparse
from flask_restful import Resource, marshal_with

import services
from configs import dify_config
from constants import DOCUMENT_EXTENSIONS
from controllers.console import api
from controllers.console.datasets.error import (
from controllers.common.errors import FilenameNotExistsError
from controllers.console.wraps import (
    account_initialization_required,
    cloud_edition_billing_resource_check,
    setup_required,
)
from fields.file_fields import file_fields, upload_config_fields
from libs.login import login_required
from services.file_service import FileService
from .errors import (
    FileTooLargeError,
    NoFileUploadedError,
    TooManyFilesError,
    UnsupportedFileTypeError,
)
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
from core.helper import ssrf_proxy
from fields.file_fields import file_fields, remote_file_info_fields, upload_config_fields
from libs.login import login_required
from services.file_service import FileService

PREVIEW_WORDS_LIMIT = 3000

    @marshal_with(file_fields)
    @cloud_edition_billing_resource_check("documents")
    def post(self):
        # get file from request
        file = request.files["file"]
        source = request.form.get("source")
        parser = reqparse.RequestParser()
        parser.add_argument("source", type=str, required=False, location="args")
        source = parser.parse_args().get("source")

        # check file
        if "file" not in request.files:
            raise NoFileUploadedError()

        if len(request.files) > 1:
            raise TooManyFilesError()

        if not file.filename:
            raise FilenameNotExistsError

        if source not in ("datasets", None):
            source = None

        try:
            upload_file = FileService.upload_file(file=file, user=current_user, source=source)
            upload_file = FileService.upload_file(
                filename=file.filename,
                content=file.read(),
                mimetype=file.mimetype,
                user=current_user,
                source=source,
            )
        except services.errors.file.FileTooLargeError as file_too_large_error:
            raise FileTooLargeError(file_too_large_error.description)
        except services.errors.file.UnsupportedFileTypeError:

    @account_initialization_required
    def get(self):
        return {"allowed_extensions": DOCUMENT_EXTENSIONS}


class RemoteFileInfoApi(Resource):
    @marshal_with(remote_file_info_fields)
    def get(self, url):
        decoded_url = urllib.parse.unquote(url)
        try:
            response = ssrf_proxy.head(decoded_url)
            return {
                "file_type": response.headers.get("Content-Type", "application/octet-stream"),
                "file_length": int(response.headers.get("Content-Length", 0)),
            }
        except Exception as e:
            return {"error": str(e)}, 400


api.add_resource(FileApi, "/files/upload")
api.add_resource(FilePreviewApi, "/files/<uuid:file_id>/preview")
api.add_resource(FileSupportTypeApi, "/files/support-type")
api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>")
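
# Client-side sketch for the upload endpoint registered above (assumptions:
# the API is served at http://localhost:5001 and a valid console bearer token
# is supplied; both are placeholders, not part of this change).
import httpx

with open("example.pdf", "rb") as f:
    resp = httpx.post(
        "http://localhost:5001/console/api/files/upload",
        files={"file": ("example.pdf", f, "application/pdf")},
        headers={"Authorization": "Bearer <console-access-token>"},
    )
resp.raise_for_status()
print(resp.json())  # expected to echo the stored file's metadata (exact keys depend on file_fields)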

from libs.exception import BaseHTTPException


class FileTooLargeError(BaseHTTPException):
    error_code = "file_too_large"
    description = "File size exceeded. {message}"
    code = 413


class UnsupportedFileTypeError(BaseHTTPException):
    error_code = "unsupported_file_type"
    description = "File type not allowed."
    code = 415


class TooManyFilesError(BaseHTTPException):
    error_code = "too_many_files"
    description = "Only one file is allowed."
    code = 400


class NoFileUploadedError(BaseHTTPException):
    error_code = "no_file_uploaded"
    description = "Please upload your file."
    code = 400

import urllib.parse
from typing import cast

from flask_login import current_user
from flask_restful import Resource, marshal_with, reqparse

from controllers.common import helpers
from core.file import helpers as file_helpers
from core.helper import ssrf_proxy
from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields
from models.account import Account
from services.file_service import FileService


class RemoteFileInfoApi(Resource):
    @marshal_with(remote_file_info_fields)
    def get(self, url):
        decoded_url = urllib.parse.unquote(url)
        try:
            response = ssrf_proxy.head(decoded_url)
            return {
                "file_type": response.headers.get("Content-Type", "application/octet-stream"),
                "file_length": int(response.headers.get("Content-Length", 0)),
            }
        except Exception as e:
            return {"error": str(e)}, 400


class RemoteFileUploadApi(Resource):
    @marshal_with(file_fields_with_signed_url)
    def post(self):
        parser = reqparse.RequestParser()
        parser.add_argument("url", type=str, required=True, help="URL is required")
        args = parser.parse_args()

        url = args["url"]

        response = ssrf_proxy.head(url)
        response.raise_for_status()

        file_info = helpers.guess_file_info_from_response(response)

        if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size):
            return {"error": "File size exceeded"}, 400

        response = ssrf_proxy.get(url)
        response.raise_for_status()
        content = response.content

        try:
            user = cast(Account, current_user)
            upload_file = FileService.upload_file(
                filename=file_info.filename,
                content=content,
                mimetype=file_info.mimetype,
                user=user,
                source_url=url,
            )
        except Exception as e:
            return {"error": str(e)}, 400

        return {
            "id": upload_file.id,
            "name": upload_file.name,
            "size": upload_file.size,
            "extension": upload_file.extension,
            "url": file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
            "mime_type": upload_file.mime_type,
            "created_by": upload_file.created_by,
            "created_at": upload_file.created_at,
        }, 201
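
# Client-side sketch of the flow implemented above: the endpoint HEADs the
# remote URL to validate the size limit, then GETs the content and stores it
# via FileService.upload_file. Host and auth header below are placeholders.
import httpx

resp = httpx.post(
    "http://localhost:5001/console/api/remote-files/upload",
    json={"url": "https://example.com/image.png"},
    headers={"Authorization": "Bearer <console-access-token>"},
)
print(resp.status_code, resp.json())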

from functools import wraps

from flask import request
from flask_restful import Resource, reqparse

from services.account_service import RegisterService, TenantService

from . import api
from .error import AlreadySetupError, NotInitValidateError, NotSetupError
from .error import AlreadySetupError, NotInitValidateError
from .init_validate import get_init_validate_status
from .wraps import only_edition_self_hosted

        return {"result": "success"}, 201


def setup_required(view):
    @wraps(view)
    def decorated(*args, **kwargs):
        # check setup
        if not get_init_validate_status():
            raise NotInitValidateError()

        elif not get_setup_status():
            raise NotSetupError()

        return view(*args, **kwargs)

    return decorated


def get_setup_status():
    if dify_config.EDITION == "SELF_HOSTED":
        return DifySetup.query.first()
    else:
        return True
    return True


api.add_resource(SetupApi, "/setup")

from werkzeug.exceptions import Forbidden

from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from fields.tag_fields import tag_fields
from libs.login import login_required
from models.model import Tag

from configs import dify_config
from constants.languages import supported_language
from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.workspace.error import (
    AccountAlreadyInitedError,
    CurrentPasswordIncorrectError,
    InvalidInvitationCodeError,
    RepeatPasswordNotMatchError,
)
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from fields.member_fields import account_fields
from libs.helper import TimestampField, timezone

from werkzeug.exceptions import Forbidden

from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from libs.login import current_user, login_required

import services
from configs import dify_config
from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
from controllers.console.wraps import (
    account_initialization_required,
    cloud_edition_billing_resource_check,
    setup_required,
)
from extensions.ext_database import db
from fields.member_fields import account_with_role_list_fields
from libs.login import login_required

from werkzeug.exceptions import Forbidden

from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.utils.encoders import jsonable_encoder

from werkzeug.exceptions import Forbidden

from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.utils.encoders import jsonable_encoder

from configs import dify_config
from controllers.console import api
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from controllers.console.wraps import account_initialization_required, setup_required
from core.model_runtime.utils.encoders import jsonable_encoder
from libs.helper import alphanumeric, uuid_value
from libs.login import login_required

from werkzeug.exceptions import Unauthorized

import services
from controllers.common.errors import FilenameNotExistsError
from controllers.console import api
from controllers.console.admin import admin_required
from controllers.console.datasets.error import (
    UnsupportedFileTypeError,
)
from controllers.console.error import AccountNotLinkTenantError
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
from controllers.console.wraps import (
    account_initialization_required,
    cloud_edition_billing_resource_check,
    setup_required,
)
from extensions.ext_database import db
from libs.helper import TimestampField
from libs.login import login_required

        if len(request.files) > 1:
            raise TooManyFilesError()

        if not file.filename:
            raise FilenameNotExistsError

        extension = file.filename.split(".")[-1]
        if extension.lower() not in {"svg", "png"}:
            raise UnsupportedFileTypeError()

        try:
            upload_file = FileService.upload_file(file=file, user=current_user)
            upload_file = FileService.upload_file(
                filename=file.filename,
                content=file.read(),
                mimetype=file.mimetype,
                user=current_user,
            )
        except services.errors.file.FileTooLargeError as file_too_large_error:
            raise FileTooLargeError(file_too_large_error.description)

import json
import os
from functools import wraps

from flask import abort, request

from configs import dify_config
from controllers.console.workspace.error import AccountNotInitializedError
from models.model import DifySetup
from services.feature_service import FeatureService
from services.operation_service import OperationService

from .error import NotInitValidateError, NotSetupError


def account_initialization_required(view):
    @wraps(view)

        return view(*args, **kwargs)

    return decorated


def setup_required(view):
    @wraps(view)
    def decorated(*args, **kwargs):
        # check setup
        if dify_config.EDITION == "SELF_HOSTED" and os.environ.get("INIT_PASSWORD") and not DifySetup.query.first():
            raise NotInitValidateError()

        elif dify_config.EDITION == "SELF_HOSTED" and not DifySetup.query.first():
            raise NotSetupError()

        return view(*args, **kwargs)

    return decorated
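
# Usage sketch (ExampleStatusApi is hypothetical): the relocated setup_required
# decorator stacks with the login and initialization checks on flask_restful
# resources, which is the import pattern applied throughout this change.
from flask_restful import Resource

from libs.login import login_required


class ExampleStatusApi(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    def get(self):
        return {"result": "success"}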

from flask_restful import Resource, reqparse

from controllers.console.setup import setup_required
from controllers.console.wraps import setup_required
from controllers.inner_api import api
from controllers.inner_api.wraps import inner_api_only
from events.tenant_event import tenant_was_created

from flask_restful import Resource, marshal_with

import services
from controllers.common.errors import FilenameNotExistsError
from controllers.service_api import api
from controllers.service_api.app.error import (
    FileTooLargeError,

        if len(request.files) > 1:
            raise TooManyFilesError()

        if not file.filename:
            raise FilenameNotExistsError

        try:
            upload_file = FileService.upload_file(file, end_user)
            upload_file = FileService.upload_file(
                filename=file.filename,
                content=file.read(),
                mimetype=file.mimetype,
                user=end_user,
                source="datasets",
            )
        except services.errors.file.FileTooLargeError as file_too_large_error:
            raise FileTooLargeError(file_too_large_error.description)
        except services.errors.file.UnsupportedFileTypeError:

from werkzeug.exceptions import NotFound

import services.dataset_service
from controllers.common.errors import FilenameNotExistsError
from controllers.service_api import api
from controllers.service_api.app.error import ProviderNotInitializeError
from controllers.service_api.dataset.error import (

        if not dataset.indexing_technique and not args["indexing_technique"]:
            raise ValueError("indexing_technique is required.")

        upload_file = FileService.upload_text(args.get("text"), args.get("name"))
        text = args.get("text")
        name = args.get("name")
        if text is None or name is None:
            raise ValueError("Both 'text' and 'name' must be non-null values.")
        upload_file = FileService.upload_text(text=str(text), text_name=str(name))
        data_source = {
            "type": "upload_file",
            "info_list": {"data_source_type": "upload_file", "file_info_list": {"file_ids": [upload_file.id]}},

            raise ValueError("Dataset is not exist.")

        if args["text"]:
            upload_file = FileService.upload_text(args.get("text"), args.get("name"))
            text = args.get("text")
            name = args.get("name")
            if text is None or name is None:
                raise ValueError("Both text and name must be strings.")
            upload_file = FileService.upload_text(text=str(text), text_name=str(name))
            data_source = {
                "type": "upload_file",
                "info_list": {"data_source_type": "upload_file", "file_info_list": {"file_ids": [upload_file.id]}},

        if len(request.files) > 1:
            raise TooManyFilesError()

        upload_file = FileService.upload_file(file, current_user)
        if not file.filename:
            raise FilenameNotExistsError

        upload_file = FileService.upload_file(
            filename=file.filename,
            content=file.read(),
            mimetype=file.mimetype,
            user=current_user,
            source="datasets",
        )
        data_source = {"type": "upload_file", "info_list": {"file_info_list": {"file_ids": [upload_file.id]}}}
        args["data_source"] = data_source
        # validate args

        if len(request.files) > 1:
            raise TooManyFilesError()

        upload_file = FileService.upload_file(file, current_user)
        if not file.filename:
            raise FilenameNotExistsError

        upload_file = FileService.upload_file(
            filename=file.filename,
            content=file.read(),
            mimetype=file.mimetype,
            user=current_user,
            source="datasets",
        )
        data_source = {"type": "upload_file", "info_list": {"file_info_list": {"file_ids": [upload_file.id]}}}
        args["data_source"] = data_source
        # validate args

from libs.external_api import ExternalApi

from .files import FileApi
from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi

bp = Blueprint("web", __name__, url_prefix="/api")
api = ExternalApi(bp)

# Files
api.add_resource(FileApi, "/files/upload")

# Remote files
api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>")
api.add_resource(RemoteFileUploadApi, "/remote-files/upload")

from . import app, audio, completion, conversation, feature, file, message, passport, saved_message, site, workflow
from . import app, audio, completion, conversation, feature, message, passport, saved_message, site, workflow

import urllib.parse

from flask import request
from flask_restful import marshal_with, reqparse

import services
from controllers.web import api
from controllers.web.error import FileTooLargeError, NoFileUploadedError, TooManyFilesError, UnsupportedFileTypeError
from controllers.web.wraps import WebApiResource
from core.helper import ssrf_proxy
from fields.file_fields import file_fields, remote_file_info_fields
from services.file_service import FileService


class FileApi(WebApiResource):
    @marshal_with(file_fields)
    def post(self, app_model, end_user):
        # get file from request
        file = request.files["file"]

        parser = reqparse.RequestParser()
        parser.add_argument("source", type=str, required=False, location="args")
        source = parser.parse_args().get("source")

        # check file
        if "file" not in request.files:
            raise NoFileUploadedError()

        if len(request.files) > 1:
            raise TooManyFilesError()

        try:
            upload_file = FileService.upload_file(file=file, user=end_user, source=source)
        except services.errors.file.FileTooLargeError as file_too_large_error:
            raise FileTooLargeError(file_too_large_error.description)
        except services.errors.file.UnsupportedFileTypeError:
            raise UnsupportedFileTypeError()

        return upload_file, 201


class RemoteFileInfoApi(WebApiResource):
    @marshal_with(remote_file_info_fields)
    def get(self, url):
        decoded_url = urllib.parse.unquote(url)
        try:
            response = ssrf_proxy.head(decoded_url)
            return {
                "file_type": response.headers.get("Content-Type", "application/octet-stream"),
                "file_length": int(response.headers.get("Content-Length", -1)),
            }
        except Exception as e:
            return {"error": str(e)}, 400


api.add_resource(FileApi, "/files/upload")
api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>")
| from flask import request | |||||
| from flask_restful import marshal_with | |||||
| import services | |||||
| from controllers.common.errors import FilenameNotExistsError | |||||
| from controllers.web.error import FileTooLargeError, NoFileUploadedError, TooManyFilesError, UnsupportedFileTypeError | |||||
| from controllers.web.wraps import WebApiResource | |||||
| from fields.file_fields import file_fields | |||||
| from services.file_service import FileService | |||||
| class FileApi(WebApiResource): | |||||
| @marshal_with(file_fields) | |||||
| def post(self, app_model, end_user): | |||||
| if "file" not in request.files: | |||||
| raise NoFileUploadedError() | |||||
| if len(request.files) > 1: | |||||
| raise TooManyFilesError() | |||||
| file = request.files["file"] | |||||
| source = request.form.get("source") | |||||
| if not file.filename: | |||||
| raise FilenameNotExistsError | |||||
| if source not in ("datasets", None): | |||||
| source = None | |||||
| try: | |||||
| upload_file = FileService.upload_file( | |||||
| filename=file.filename, | |||||
| content=file.read(), | |||||
| mimetype=file.mimetype, | |||||
| user=end_user, | |||||
| source=source, | |||||
| ) | |||||
| except services.errors.file.FileTooLargeError as file_too_large_error: | |||||
| raise FileTooLargeError(file_too_large_error.description) | |||||
| except services.errors.file.UnsupportedFileTypeError: | |||||
| raise UnsupportedFileTypeError() | |||||
| return upload_file, 201 |
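The reworked web FileApi still takes a single multipart "file" part, with an optional "source" form field that is only honored when it equals "datasets". A minimal client sketch, assuming the web API is reachable under /api and authenticated with a bearer passport token (the base URL and auth header are illustrative assumptions, not taken from this change):

import httpx

BASE_URL = "http://localhost:5001/api"                      # assumed host/prefix for illustration
HEADERS = {"Authorization": "Bearer <web passport token>"}  # assumed auth scheme

with open("report.pdf", "rb") as f:
    resp = httpx.post(
        f"{BASE_URL}/files/upload",
        headers=HEADERS,
        files={"file": ("report.pdf", f, "application/pdf")},  # exactly one "file" part
        data={"source": "datasets"},  # optional; any other value is coerced to None by the controller
    )
resp.raise_for_status()
print(resp.json())  # marshalled with file_fields, returned with status 201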
| import urllib.parse | |||||
| from flask_login import current_user | |||||
| from flask_restful import marshal_with, reqparse | |||||
| from controllers.common import helpers | |||||
| from controllers.web.wraps import WebApiResource | |||||
| from core.file import helpers as file_helpers | |||||
| from core.helper import ssrf_proxy | |||||
| from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields | |||||
| from services.file_service import FileService | |||||
| class RemoteFileInfoApi(WebApiResource): | |||||
| @marshal_with(remote_file_info_fields) | |||||
| def get(self, url): | |||||
| decoded_url = urllib.parse.unquote(url) | |||||
| try: | |||||
| response = ssrf_proxy.head(decoded_url) | |||||
| return { | |||||
| "file_type": response.headers.get("Content-Type", "application/octet-stream"), | |||||
| "file_length": int(response.headers.get("Content-Length", -1)), | |||||
| } | |||||
| except Exception as e: | |||||
| return {"error": str(e)}, 400 | |||||
| class RemoteFileUploadApi(WebApiResource): | |||||
| @marshal_with(file_fields_with_signed_url) | |||||
| def post(self): | |||||
| parser = reqparse.RequestParser() | |||||
| parser.add_argument("url", type=str, required=True, help="URL is required") | |||||
| args = parser.parse_args() | |||||
| url = args["url"] | |||||
| response = ssrf_proxy.head(url) | |||||
| response.raise_for_status() | |||||
| file_info = helpers.guess_file_info_from_response(response) | |||||
| if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size): | |||||
| return {"error": "File size exceeded"}, 400 | |||||
| response = ssrf_proxy.get(url) | |||||
| response.raise_for_status() | |||||
| content = response.content | |||||
| try: | |||||
| upload_file = FileService.upload_file( | |||||
| filename=file_info.filename, | |||||
| content=content, | |||||
| mimetype=file_info.mimetype, | |||||
| user=current_user, | |||||
| source_url=url, | |||||
| ) | |||||
| except Exception as e: | |||||
| return {"error": str(e)}, 400 | |||||
| return { | |||||
| "id": upload_file.id, | |||||
| "name": upload_file.name, | |||||
| "size": upload_file.size, | |||||
| "extension": upload_file.extension, | |||||
| "url": file_helpers.get_signed_file_url(upload_file_id=upload_file.id), | |||||
| "mime_type": upload_file.mime_type, | |||||
| "created_by": upload_file.created_by, | |||||
| "created_at": upload_file.created_at, | |||||
| }, 201 |
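RemoteFileInfoApi receives the target URL as a path segment and unquotes it, so clients should percent-encode it; RemoteFileUploadApi instead takes a plain "url" argument, HEAD-checks the size limit, downloads through the SSRF proxy, and responds with the signed-URL payload. A rough client sketch under the same illustrative assumptions as above (base URL and auth header are placeholders):

import urllib.parse

import httpx

BASE_URL = "http://localhost:5001/api"                      # assumed
HEADERS = {"Authorization": "Bearer <web passport token>"}  # assumed

remote_url = "https://example.com/files/manual.pdf"

# Metadata lookup: percent-encode so the URL survives the <path:url> converter;
# the controller's urllib.parse.unquote() restores it before issuing the HEAD request.
info = httpx.get(
    f"{BASE_URL}/remote-files/{urllib.parse.quote(remote_url, safe='')}",
    headers=HEADERS,
)
print(info.json())  # {"file_type": ..., "file_length": ...}

# Server-side fetch and upload; the 201 response includes a signed preview URL.
uploaded = httpx.post(
    f"{BASE_URL}/remote-files/upload",
    headers=HEADERS,
    json={"url": remote_url},
)
print(uploaded.json())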
| tenant_id=tenant_id, | tenant_id=tenant_id, | ||||
| type=file_type, | type=file_type, | ||||
| transfer_method=transfer_method, | transfer_method=transfer_method, | ||||
| remote_url=None, | |||||
| remote_url=row.source_url, | |||||
| related_id=mapping.get("upload_file_id"), | related_id=mapping.get("upload_file_id"), | ||||
| _extra_config=config, | _extra_config=config, | ||||
| size=row.size, | size=row.size, |
| "file_type": fields.String(attribute="file_type"), | "file_type": fields.String(attribute="file_type"), | ||||
| "file_length": fields.Integer(attribute="file_length"), | "file_length": fields.Integer(attribute="file_length"), | ||||
| } | } | ||||
| file_fields_with_signed_url = { | |||||
| "id": fields.String, | |||||
| "name": fields.String, | |||||
| "size": fields.Integer, | |||||
| "extension": fields.String, | |||||
| "url": fields.String, | |||||
| "mime_type": fields.String, | |||||
| "created_by": fields.String, | |||||
| "created_at": TimestampField, | |||||
| } |
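For reference, a payload marshalled with file_fields_with_signed_url has roughly this shape; every value below is a placeholder, and TimestampField is assumed to serialize created_at as a Unix timestamp:

# Illustrative shape only; all values are placeholders.
example_remote_upload_response = {
    "id": "2f9c3a6e-0000-0000-0000-000000000000",
    "name": "manual.pdf",
    "size": 104857,
    "extension": "pdf",
    "url": "https://.../files/2f9c3a6e-.../file-preview?timestamp=...&nonce=...&sign=...",  # signed URL
    "mime_type": "application/pdf",
    "created_by": "user-uuid",
    "created_at": 1730430000,  # assumed Unix-timestamp serialization
}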
| sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), | sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), | ||||
| sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey') | sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey') | ||||
| ) | ) | ||||
| with op.batch_alter_table('tracing_app_configs', schema=None) as batch_op: | |||||
| batch_op.create_index('tracing_app_config_app_id_idx', ['app_id'], unique=False) | |||||
| # ### end Alembic commands ### | # ### end Alembic commands ### | ||||
| def downgrade(): | def downgrade(): | ||||
| # ### commands auto generated by Alembic - please adjust! ### | # ### commands auto generated by Alembic - please adjust! ### | ||||
| with op.batch_alter_table('tracing_app_configs', schema=None) as batch_op: | |||||
| batch_op.drop_index('tracing_app_config_app_id_idx') | |||||
| op.drop_table('tracing_app_configs') | op.drop_table('tracing_app_configs') | ||||
| # ### end Alembic commands ### | # ### end Alembic commands ### |
| """Add upload_files.source_url | |||||
| Revision ID: d3f6769a94a3 | |||||
| Revises: 43fa78bc3b7d | |||||
| Create Date: 2024-11-01 04:34:23.816198 | |||||
| """ | |||||
| from alembic import op | |||||
| import models as models | |||||
| import sqlalchemy as sa | |||||
| from sqlalchemy.dialects import postgresql | |||||
| # revision identifiers, used by Alembic. | |||||
| revision = 'd3f6769a94a3' | |||||
| down_revision = '43fa78bc3b7d' | |||||
| branch_labels = None | |||||
| depends_on = None | |||||
| def upgrade(): | |||||
| # ### commands auto generated by Alembic - please adjust! ### | |||||
| with op.batch_alter_table('upload_files', schema=None) as batch_op: | |||||
| batch_op.add_column(sa.Column('source_url', sa.String(length=255), server_default='', nullable=False)) | |||||
| # ### end Alembic commands ### | |||||
| def downgrade(): | |||||
| # ### commands auto generated by Alembic - please adjust! ### | |||||
| with op.batch_alter_table('upload_files', schema=None) as batch_op: | |||||
| batch_op.drop_column('source_url') | |||||
| # ### end Alembic commands ### |
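Once this revision is applied, the new column can be sanity-checked with SQLAlchemy's inspector; a minimal sketch, assuming a reachable database (the connection string is a placeholder):

from sqlalchemy import create_engine, inspect

engine = create_engine("postgresql://dify:dify@localhost:5432/dify")  # placeholder URL

columns = {col["name"]: col for col in inspect(engine).get_columns("upload_files")}
source_url = columns["source_url"]
print(source_url["type"], source_url["nullable"], source_url.get("default"))
# Expected after d3f6769a94a3: VARCHAR(255), NOT NULL, server default ''
# (the column is later widened to TEXT by revision f4d7ce70a7ca)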
| """rename conversation variables index name | |||||
| Revision ID: 93ad8c19c40b | |||||
| Revises: d3f6769a94a3 | |||||
| Create Date: 2024-11-01 04:49:53.100250 | |||||
| """ | |||||
| from alembic import op | |||||
| import sqlalchemy as sa | |||||
| from sqlalchemy.dialects import postgresql | |||||
| # revision identifiers, used by Alembic. | |||||
| revision = '93ad8c19c40b' | |||||
| down_revision = 'd3f6769a94a3' | |||||
| branch_labels = None | |||||
| depends_on = None | |||||
| def upgrade(): | |||||
| # ### commands auto generated by Alembic - please adjust! ### | |||||
| conn = op.get_bind() | |||||
| if conn.dialect.name == 'postgresql': | |||||
| # Rename indexes for PostgreSQL | |||||
| op.execute('ALTER INDEX workflow__conversation_variables_app_id_idx RENAME TO workflow_conversation_variables_app_id_idx') | |||||
| op.execute('ALTER INDEX workflow__conversation_variables_created_at_idx RENAME TO workflow_conversation_variables_created_at_idx') | |||||
| else: | |||||
| # For other databases, use the original drop and create method | |||||
| with op.batch_alter_table('workflow_conversation_variables', schema=None) as batch_op: | |||||
| batch_op.drop_index('workflow__conversation_variables_app_id_idx') | |||||
| batch_op.drop_index('workflow__conversation_variables_created_at_idx') | |||||
| batch_op.create_index(batch_op.f('workflow_conversation_variables_app_id_idx'), ['app_id'], unique=False) | |||||
| batch_op.create_index(batch_op.f('workflow_conversation_variables_created_at_idx'), ['created_at'], unique=False) | |||||
| # ### end Alembic commands ### | |||||
| def downgrade(): | |||||
| # ### commands auto generated by Alembic - please adjust! ### | |||||
| conn = op.get_bind() | |||||
| if conn.dialect.name == 'postgresql': | |||||
| # Rename indexes back for PostgreSQL | |||||
| op.execute('ALTER INDEX workflow_conversation_variables_app_id_idx RENAME TO workflow__conversation_variables_app_id_idx') | |||||
| op.execute('ALTER INDEX workflow_conversation_variables_created_at_idx RENAME TO workflow__conversation_variables_created_at_idx') | |||||
| else: | |||||
| # For other databases, use the original drop and create method | |||||
| with op.batch_alter_table('workflow_conversation_variables', schema=None) as batch_op: | |||||
| batch_op.drop_index(batch_op.f('workflow_conversation_variables_created_at_idx')) | |||||
| batch_op.drop_index(batch_op.f('workflow_conversation_variables_app_id_idx')) | |||||
| batch_op.create_index('workflow__conversation_variables_created_at_idx', ['created_at'], unique=False) | |||||
| batch_op.create_index('workflow__conversation_variables_app_id_idx', ['app_id'], unique=False) | |||||
| # ### end Alembic commands ### |
| """update upload_files.source_url | |||||
| Revision ID: f4d7ce70a7ca | |||||
| Revises: 93ad8c19c40b | |||||
| Create Date: 2024-11-01 05:40:03.531751 | |||||
| """ | |||||
| from alembic import op | |||||
| import models as models | |||||
| import sqlalchemy as sa | |||||
| from sqlalchemy.dialects import postgresql | |||||
| # revision identifiers, used by Alembic. | |||||
| revision = 'f4d7ce70a7ca' | |||||
| down_revision = '93ad8c19c40b' | |||||
| branch_labels = None | |||||
| depends_on = None | |||||
| def upgrade(): | |||||
| # ### commands auto generated by Alembic - please adjust! ### | |||||
| with op.batch_alter_table('upload_files', schema=None) as batch_op: | |||||
| batch_op.alter_column('source_url', | |||||
| existing_type=sa.VARCHAR(length=255), | |||||
| type_=sa.TEXT(), | |||||
| existing_nullable=False, | |||||
| existing_server_default=sa.text("''::character varying")) | |||||
| # ### end Alembic commands ### | |||||
| def downgrade(): | |||||
| # ### commands auto generated by Alembic - please adjust! ### | |||||
| with op.batch_alter_table('upload_files', schema=None) as batch_op: | |||||
| batch_op.alter_column('source_url', | |||||
| existing_type=sa.TEXT(), | |||||
| type_=sa.VARCHAR(length=255), | |||||
| existing_nullable=False, | |||||
| existing_server_default=sa.text("''::character varying")) | |||||
| # ### end Alembic commands ### |
| """update type of custom_disclaimer to TEXT | |||||
| Revision ID: d07474999927 | |||||
| Revises: f4d7ce70a7ca | |||||
| Create Date: 2024-11-01 06:22:27.981398 | |||||
| """ | |||||
| from alembic import op | |||||
| import models as models | |||||
| import sqlalchemy as sa | |||||
| from sqlalchemy.dialects import postgresql | |||||
| # revision identifiers, used by Alembic. | |||||
| revision = 'd07474999927' | |||||
| down_revision = 'f4d7ce70a7ca' | |||||
| branch_labels = None | |||||
| depends_on = None | |||||
| def upgrade(): | |||||
| # ### commands auto generated by Alembic - please adjust! ### | |||||
| op.execute("UPDATE recommended_apps SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL") | |||||
| op.execute("UPDATE sites SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL") | |||||
| op.execute("UPDATE tool_api_providers SET custom_disclaimer = '' WHERE custom_disclaimer IS NULL") | |||||
| with op.batch_alter_table('recommended_apps', schema=None) as batch_op: | |||||
| batch_op.alter_column('custom_disclaimer', | |||||
| existing_type=sa.VARCHAR(length=255), | |||||
| type_=sa.TEXT(), | |||||
| nullable=False) | |||||
| with op.batch_alter_table('sites', schema=None) as batch_op: | |||||
| batch_op.alter_column('custom_disclaimer', | |||||
| existing_type=sa.VARCHAR(length=255), | |||||
| type_=sa.TEXT(), | |||||
| nullable=False) | |||||
| with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: | |||||
| batch_op.alter_column('custom_disclaimer', | |||||
| existing_type=sa.VARCHAR(length=255), | |||||
| type_=sa.TEXT(), | |||||
| nullable=False) | |||||
| # ### end Alembic commands ### | |||||
| def downgrade(): | |||||
| # ### commands auto generated by Alembic - please adjust! ### | |||||
| with op.batch_alter_table('tool_api_providers', schema=None) as batch_op: | |||||
| batch_op.alter_column('custom_disclaimer', | |||||
| existing_type=sa.TEXT(), | |||||
| type_=sa.VARCHAR(length=255), | |||||
| nullable=True) | |||||
| with op.batch_alter_table('sites', schema=None) as batch_op: | |||||
| batch_op.alter_column('custom_disclaimer', | |||||
| existing_type=sa.TEXT(), | |||||
| type_=sa.VARCHAR(length=255), | |||||
| nullable=True) | |||||
| with op.batch_alter_table('recommended_apps', schema=None) as batch_op: | |||||
| batch_op.alter_column('custom_disclaimer', | |||||
| existing_type=sa.TEXT(), | |||||
| type_=sa.VARCHAR(length=255), | |||||
| nullable=True) | |||||
| # ### end Alembic commands ### |
| """update workflows graph, features and updated_at | |||||
| Revision ID: 09a8d1878d9b | |||||
| Revises: d07474999927 | |||||
| Create Date: 2024-11-01 06:23:59.579186 | |||||
| """ | |||||
| from alembic import op | |||||
| import models as models | |||||
| import sqlalchemy as sa | |||||
| from sqlalchemy.dialects import postgresql | |||||
| # revision identifiers, used by Alembic. | |||||
| revision = '09a8d1878d9b' | |||||
| down_revision = 'd07474999927' | |||||
| branch_labels = None | |||||
| depends_on = None | |||||
| def upgrade(): | |||||
| # ### commands auto generated by Alembic - please adjust! ### | |||||
| with op.batch_alter_table('conversations', schema=None) as batch_op: | |||||
| batch_op.alter_column('inputs', | |||||
| existing_type=postgresql.JSON(astext_type=sa.Text()), | |||||
| nullable=False) | |||||
| with op.batch_alter_table('messages', schema=None) as batch_op: | |||||
| batch_op.alter_column('inputs', | |||||
| existing_type=postgresql.JSON(astext_type=sa.Text()), | |||||
| nullable=False) | |||||
| op.execute("UPDATE workflows SET updated_at = created_at WHERE updated_at IS NULL") | |||||
| op.execute("UPDATE workflows SET graph = '' WHERE graph IS NULL") | |||||
| op.execute("UPDATE workflows SET features = '' WHERE features IS NULL") | |||||
| with op.batch_alter_table('workflows', schema=None) as batch_op: | |||||
| batch_op.alter_column('graph', | |||||
| existing_type=sa.TEXT(), | |||||
| nullable=False) | |||||
| batch_op.alter_column('features', | |||||
| existing_type=sa.TEXT(), | |||||
| type_=sa.String(), | |||||
| nullable=False) | |||||
| batch_op.alter_column('updated_at', | |||||
| existing_type=postgresql.TIMESTAMP(), | |||||
| nullable=False) | |||||
| # ### end Alembic commands ### | |||||
| def downgrade(): | |||||
| # ### commands auto generated by Alembic - please adjust! ### | |||||
| with op.batch_alter_table('workflows', schema=None) as batch_op: | |||||
| batch_op.alter_column('updated_at', | |||||
| existing_type=postgresql.TIMESTAMP(), | |||||
| nullable=True) | |||||
| batch_op.alter_column('features', | |||||
| existing_type=sa.String(), | |||||
| type_=sa.TEXT(), | |||||
| nullable=True) | |||||
| batch_op.alter_column('graph', | |||||
| existing_type=sa.TEXT(), | |||||
| nullable=True) | |||||
| with op.batch_alter_table('messages', schema=None) as batch_op: | |||||
| batch_op.alter_column('inputs', | |||||
| existing_type=postgresql.JSON(astext_type=sa.Text()), | |||||
| nullable=True) | |||||
| with op.batch_alter_table('conversations', schema=None) as batch_op: | |||||
| batch_op.alter_column('inputs', | |||||
| existing_type=postgresql.JSON(astext_type=sa.Text()), | |||||
| nullable=True) | |||||
| # ### end Alembic commands ### |
| with op.batch_alter_table('apps', schema=None) as batch_op: | with op.batch_alter_table('apps', schema=None) as batch_op: | ||||
| batch_op.add_column(sa.Column('tracing', sa.Text(), nullable=True)) | batch_op.add_column(sa.Column('tracing', sa.Text(), nullable=True)) | ||||
| with op.batch_alter_table('trace_app_config', schema=None) as batch_op: | |||||
| batch_op.create_index('tracing_app_config_app_id_idx', ['app_id'], unique=False) | |||||
| # ### end Alembic commands ### | # ### end Alembic commands ### | ||||
| def downgrade(): | def downgrade(): | ||||
| # ### commands auto generated by Alembic - please adjust! ### | # ### commands auto generated by Alembic - please adjust! ### | ||||
| with op.batch_alter_table('trace_app_config', schema=None) as batch_op: | |||||
| batch_op.drop_index('tracing_app_config_app_id_idx') | |||||
| with op.batch_alter_table('apps', schema=None) as batch_op: | with op.batch_alter_table('apps', schema=None) as batch_op: | ||||
| batch_op.drop_column('tracing') | batch_op.drop_column('tracing') | ||||
| sa.Column('is_active', sa.Boolean(), server_default=sa.text('true'), nullable=False), | sa.Column('is_active', sa.Boolean(), server_default=sa.text('true'), nullable=False), | ||||
| sa.PrimaryKeyConstraint('id', name='trace_app_config_pkey') | sa.PrimaryKeyConstraint('id', name='trace_app_config_pkey') | ||||
| ) | ) | ||||
| with op.batch_alter_table('trace_app_config', schema=None) as batch_op: | with op.batch_alter_table('trace_app_config', schema=None) as batch_op: | ||||
| batch_op.create_index('trace_app_config_app_id_idx', ['app_id'], unique=False) | batch_op.create_index('trace_app_config_app_id_idx', ['app_id'], unique=False) | ||||
| with op.batch_alter_table('tracing_app_configs', schema=None) as batch_op: | |||||
| batch_op.drop_index('tracing_app_config_app_id_idx') | |||||
| # ### end Alembic commands ### | # ### end Alembic commands ### | ||||
| def downgrade(): | def downgrade(): | ||||
| # ### commands auto generated by Alembic - please adjust! ### | # ### commands auto generated by Alembic - please adjust! ### | ||||
| op.create_table('tracing_app_configs', | |||||
| sa.Column('id', sa.UUID(), server_default=sa.text('uuid_generate_v4()'), autoincrement=False, nullable=False), | |||||
| sa.Column('app_id', sa.UUID(), autoincrement=False, nullable=False), | |||||
| sa.Column('tracing_provider', sa.VARCHAR(length=255), autoincrement=False, nullable=True), | |||||
| sa.Column('tracing_config', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True), | |||||
| sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('now()'), autoincrement=False, nullable=False), | |||||
| sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('now()'), autoincrement=False, nullable=False), | |||||
| sa.PrimaryKeyConstraint('id', name='trace_app_config_pkey') | |||||
| ) | |||||
| with op.batch_alter_table('tracing_app_configs', schema=None) as batch_op: | |||||
| batch_op.create_index('trace_app_config_app_id_idx', ['app_id'], unique=False) | |||||
| with op.batch_alter_table('trace_app_config', schema=None) as batch_op: | |||||
| batch_op.drop_index('trace_app_config_app_id_idx') | |||||
| op.drop_table('trace_app_config') | op.drop_table('trace_app_config') | ||||
| # ### end Alembic commands ### | # ### end Alembic commands ### |
| # ### commands auto generated by Alembic - please adjust! ### | # ### commands auto generated by Alembic - please adjust! ### | ||||
| op.drop_table('tracing_app_configs') | op.drop_table('tracing_app_configs') | ||||
| with op.batch_alter_table('trace_app_config', schema=None) as batch_op: | |||||
| batch_op.drop_index('tracing_app_config_app_id_idx') | |||||
| # idx_dataset_permissions_tenant_id | # idx_dataset_permissions_tenant_id | ||||
| with op.batch_alter_table('dataset_permissions', schema=None) as batch_op: | with op.batch_alter_table('dataset_permissions', schema=None) as batch_op: | ||||
| batch_op.create_index('idx_dataset_permissions_tenant_id', ['tenant_id']) | batch_op.create_index('idx_dataset_permissions_tenant_id', ['tenant_id']) | ||||
| # ### end Alembic commands ### | # ### end Alembic commands ### | ||||
| sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey') | sa.PrimaryKeyConstraint('id', name='tracing_app_config_pkey') | ||||
| ) | ) | ||||
| with op.batch_alter_table('trace_app_config', schema=None) as batch_op: | |||||
| batch_op.create_index('tracing_app_config_app_id_idx', ['app_id']) | |||||
| with op.batch_alter_table('dataset_permissions', schema=None) as batch_op: | with op.batch_alter_table('dataset_permissions', schema=None) as batch_op: | ||||
| batch_op.drop_index('idx_dataset_permissions_tenant_id') | batch_op.drop_index('idx_dataset_permissions_tenant_id') | ||||
| # ### end Alembic commands ### | # ### end Alembic commands ### |
| from enum import Enum | from enum import Enum | ||||
| from typing import Any, Literal, Optional | from typing import Any, Literal, Optional | ||||
| import sqlalchemy as sa | |||||
| from flask import request | from flask import request | ||||
| from flask_login import UserMixin | from flask_login import UserMixin | ||||
| from pydantic import BaseModel, Field | from pydantic import BaseModel, Field | ||||
| description = db.Column(db.JSON, nullable=False) | description = db.Column(db.JSON, nullable=False) | ||||
| copyright = db.Column(db.String(255), nullable=False) | copyright = db.Column(db.String(255), nullable=False) | ||||
| privacy_policy = db.Column(db.String(255), nullable=False) | privacy_policy = db.Column(db.String(255), nullable=False) | ||||
| custom_disclaimer = db.Column(db.String(255), nullable=True) | |||||
| custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="") | |||||
| category = db.Column(db.String(255), nullable=False) | category = db.Column(db.String(255), nullable=False) | ||||
| position = db.Column(db.Integer, nullable=False, default=0) | position = db.Column(db.Integer, nullable=False, default=0) | ||||
| is_listed = db.Column(db.Boolean, nullable=False, default=True) | is_listed = db.Column(db.Boolean, nullable=False, default=True) | ||||
| privacy_policy = db.Column(db.String(255)) | privacy_policy = db.Column(db.String(255)) | ||||
| show_workflow_steps = db.Column(db.Boolean, nullable=False, server_default=db.text("true")) | show_workflow_steps = db.Column(db.Boolean, nullable=False, server_default=db.text("true")) | ||||
| use_icon_as_answer_icon = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) | use_icon_as_answer_icon = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) | ||||
| custom_disclaimer = db.Column(db.String(255), nullable=True) | |||||
| custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="") | |||||
| customize_domain = db.Column(db.String(255)) | customize_domain = db.Column(db.String(255)) | ||||
| customize_token_strategy = db.Column(db.String(255), nullable=False) | customize_token_strategy = db.Column(db.String(255), nullable=False) | ||||
| prompt_public = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) | prompt_public = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) | ||||
| used_by: Mapped[str | None] = db.Column(StringUUID, nullable=True) | used_by: Mapped[str | None] = db.Column(StringUUID, nullable=True) | ||||
| used_at: Mapped[datetime | None] = db.Column(db.DateTime, nullable=True) | used_at: Mapped[datetime | None] = db.Column(db.DateTime, nullable=True) | ||||
| hash: Mapped[str | None] = db.Column(db.String(255), nullable=True) | hash: Mapped[str | None] = db.Column(db.String(255), nullable=True) | ||||
| source_url: Mapped[str] = mapped_column(sa.TEXT, default="") | |||||
| def __init__( | def __init__( | ||||
| self, | self, | ||||
| used_by: str | None = None, | used_by: str | None = None, | ||||
| used_at: datetime | None = None, | used_at: datetime | None = None, | ||||
| hash: str | None = None, | hash: str | None = None, | ||||
| ) -> None: | |||||
| source_url: str = "", | |||||
| ): | |||||
| self.tenant_id = tenant_id | self.tenant_id = tenant_id | ||||
| self.storage_type = storage_type | self.storage_type = storage_type | ||||
| self.key = key | self.key = key | ||||
| self.used_by = used_by | self.used_by = used_by | ||||
| self.used_at = used_at | self.used_at = used_at | ||||
| self.hash = hash | self.hash = hash | ||||
| self.source_url = source_url | |||||
| class ApiRequest(db.Model): | class ApiRequest(db.Model): |
| import json | import json | ||||
| from typing import Optional | from typing import Optional | ||||
| import sqlalchemy as sa | |||||
| from sqlalchemy import ForeignKey | from sqlalchemy import ForeignKey | ||||
| from sqlalchemy.orm import Mapped, mapped_column | from sqlalchemy.orm import Mapped, mapped_column | ||||
| # privacy policy | # privacy policy | ||||
| privacy_policy = db.Column(db.String(255), nullable=True) | privacy_policy = db.Column(db.String(255), nullable=True) | ||||
| # custom_disclaimer | # custom_disclaimer | ||||
| custom_disclaimer = db.Column(db.String(255), nullable=True) | |||||
| custom_disclaimer: Mapped[str] = mapped_column(sa.TEXT, default="") | |||||
| created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) | created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) | ||||
| updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) | updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) |
| created_at: Mapped[datetime] = mapped_column( | created_at: Mapped[datetime] = mapped_column( | ||||
| db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)") | db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)") | ||||
| ) | ) | ||||
| updated_by: Mapped[str] = mapped_column(StringUUID) | |||||
| updated_at: Mapped[datetime] = mapped_column(db.DateTime) | |||||
| updated_by: Mapped[Optional[str]] = mapped_column(StringUUID) | |||||
| updated_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False) | |||||
| _environment_variables: Mapped[str] = mapped_column( | _environment_variables: Mapped[str] = mapped_column( | ||||
| "environment_variables", db.Text, nullable=False, server_default="{}" | "environment_variables", db.Text, nullable=False, server_default="{}" | ||||
| ) | ) |
| import random | import random | ||||
| import time | import time | ||||
| import uuid | import uuid | ||||
| from typing import Optional | |||||
| from typing import Any, Optional | |||||
| from flask_login import current_user | from flask_login import current_user | ||||
| from sqlalchemy import func | from sqlalchemy import func | ||||
| def save_document_with_dataset_id( | def save_document_with_dataset_id( | ||||
| dataset: Dataset, | dataset: Dataset, | ||||
| document_data: dict, | document_data: dict, | ||||
| account: Account, | |||||
| account: Account | Any, | |||||
| dataset_process_rule: Optional[DatasetProcessRule] = None, | dataset_process_rule: Optional[DatasetProcessRule] = None, | ||||
| created_from: str = "web", | created_from: str = "web", | ||||
| ): | ): |
| import datetime | import datetime | ||||
| import hashlib | import hashlib | ||||
| import uuid | import uuid | ||||
| from typing import Literal, Union | |||||
| from typing import Any, Literal, Union | |||||
| from flask_login import current_user | from flask_login import current_user | ||||
| from werkzeug.datastructures import FileStorage | |||||
| from werkzeug.exceptions import NotFound | from werkzeug.exceptions import NotFound | ||||
| from configs import dify_config | from configs import dify_config | ||||
| from models.account import Account | from models.account import Account | ||||
| from models.enums import CreatedByRole | from models.enums import CreatedByRole | ||||
| from models.model import EndUser, UploadFile | from models.model import EndUser, UploadFile | ||||
| from services.errors.file import FileNotExistsError, FileTooLargeError, UnsupportedFileTypeError | |||||
| from .errors.file import FileTooLargeError, UnsupportedFileTypeError | |||||
| PREVIEW_WORDS_LIMIT = 3000 | PREVIEW_WORDS_LIMIT = 3000 | ||||
| class FileService: | class FileService: | ||||
| @staticmethod | @staticmethod | ||||
| def upload_file( | def upload_file( | ||||
| file: FileStorage, user: Union[Account, EndUser], source: Literal["datasets"] | None = None | |||||
| *, | |||||
| filename: str, | |||||
| content: bytes, | |||||
| mimetype: str, | |||||
| user: Union[Account, EndUser, Any], | |||||
| source: Literal["datasets"] | None = None, | |||||
| source_url: str = "", | |||||
| ) -> UploadFile: | ) -> UploadFile: | ||||
| # get file name | |||||
| filename = file.filename | |||||
| if not filename: | |||||
| raise FileNotExistsError | |||||
| # get file extension | |||||
| extension = filename.split(".")[-1].lower() | extension = filename.split(".")[-1].lower() | ||||
| if len(filename) > 200: | if len(filename) > 200: | ||||
| filename = filename.split(".")[0][:200] + "." + extension | filename = filename.split(".")[0][:200] + "." + extension | ||||
| if source == "datasets" and extension not in DOCUMENT_EXTENSIONS: | if source == "datasets" and extension not in DOCUMENT_EXTENSIONS: | ||||
| raise UnsupportedFileTypeError() | raise UnsupportedFileTypeError() | ||||
| # select file size limit | |||||
| if extension in IMAGE_EXTENSIONS: | |||||
| file_size_limit = dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT * 1024 * 1024 | |||||
| elif extension in VIDEO_EXTENSIONS: | |||||
| file_size_limit = dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT * 1024 * 1024 | |||||
| elif extension in AUDIO_EXTENSIONS: | |||||
| file_size_limit = dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT * 1024 * 1024 | |||||
| else: | |||||
| file_size_limit = dify_config.UPLOAD_FILE_SIZE_LIMIT * 1024 * 1024 | |||||
| # read file content | |||||
| file_content = file.read() | |||||
| # get file size | # get file size | ||||
| file_size = len(file_content) | |||||
| file_size = len(content) | |||||
| # check if the file size is exceeded | # check if the file size is exceeded | ||||
| if file_size > file_size_limit: | |||||
| message = f"File size exceeded. {file_size} > {file_size_limit}" | |||||
| raise FileTooLargeError(message) | |||||
| if not FileService.is_file_size_within_limit(extension=extension, file_size=file_size): | |||||
| raise FileTooLargeError | |||||
| # generate file key | # generate file key | ||||
| file_uuid = str(uuid.uuid4()) | file_uuid = str(uuid.uuid4()) | ||||
| file_key = "upload_files/" + current_tenant_id + "/" + file_uuid + "." + extension | file_key = "upload_files/" + current_tenant_id + "/" + file_uuid + "." + extension | ||||
| # save file to storage | # save file to storage | ||||
| storage.save(file_key, file_content) | |||||
| storage.save(file_key, content) | |||||
| # save file to db | # save file to db | ||||
| upload_file = UploadFile( | upload_file = UploadFile( | ||||
| name=filename, | name=filename, | ||||
| size=file_size, | size=file_size, | ||||
| extension=extension, | extension=extension, | ||||
| mime_type=file.mimetype, | |||||
| mime_type=mimetype, | |||||
| created_by_role=(CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER), | created_by_role=(CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER), | ||||
| created_by=user.id, | created_by=user.id, | ||||
| created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None), | created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None), | ||||
| used=False, | used=False, | ||||
| hash=hashlib.sha3_256(file_content).hexdigest(), | |||||
| hash=hashlib.sha3_256(content).hexdigest(), | |||||
| source_url=source_url, | |||||
| ) | ) | ||||
| db.session.add(upload_file) | db.session.add(upload_file) | ||||
| return upload_file | return upload_file | ||||
| @staticmethod | |||||
| def is_file_size_within_limit(*, extension: str, file_size: int) -> bool: | |||||
| if extension in IMAGE_EXTENSIONS: | |||||
| file_size_limit = dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT * 1024 * 1024 | |||||
| elif extension in VIDEO_EXTENSIONS: | |||||
| file_size_limit = dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT * 1024 * 1024 | |||||
| elif extension in AUDIO_EXTENSIONS: | |||||
| file_size_limit = dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT * 1024 * 1024 | |||||
| else: | |||||
| file_size_limit = dify_config.UPLOAD_FILE_SIZE_LIMIT * 1024 * 1024 | |||||
| return file_size <= file_size_limit | |||||
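FileService.upload_file is now keyword-only and takes raw bytes plus an explicit mimetype instead of a werkzeug FileStorage, with the size check factored out into is_file_size_within_limit; callers unpack the incoming file themselves, as the controllers above do. A minimal caller sketch, assuming an authenticated user and an active Flask application/database context (the content and URLs are placeholders):

from flask_login import current_user

from services.file_service import FileService

raw = b"%PDF-1.7 placeholder content"

if not FileService.is_file_size_within_limit(extension="pdf", file_size=len(raw)):
    raise ValueError("File size exceeded")

upload_file = FileService.upload_file(
    filename="report.pdf",
    content=raw,
    mimetype="application/pdf",
    user=current_user,
    source_url="https://example.com/report.pdf",  # optional provenance, persisted to upload_files.source_url
)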
| @staticmethod | @staticmethod | ||||
| def upload_text(text: str, text_name: str) -> UploadFile: | def upload_text(text: str, text_name: str) -> UploadFile: | ||||
| if len(text_name) > 200: | if len(text_name) > 200: |