| UnsupportedFileTypeError, | UnsupportedFileTypeError, | ||||
| ) | ) | ||||
| from controllers.console.setup import setup_required | from controllers.console.setup import setup_required | ||||
| from controllers.console.wraps import account_initialization_required | |||||
| from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check | |||||
| from fields.file_fields import file_fields, upload_config_fields | from fields.file_fields import file_fields, upload_config_fields | ||||
| from libs.login import login_required | from libs.login import login_required | ||||
| from services.file_service import ALLOWED_EXTENSIONS, UNSTRUSTURED_ALLOWED_EXTENSIONS, FileService | from services.file_service import ALLOWED_EXTENSIONS, UNSTRUSTURED_ALLOWED_EXTENSIONS, FileService | ||||
| @login_required | @login_required | ||||
| @account_initialization_required | @account_initialization_required | ||||
| @marshal_with(file_fields) | @marshal_with(file_fields) | ||||
| @cloud_edition_billing_resource_check(resource='documents') | |||||
| def post(self): | def post(self): | ||||
| # get file from request | # get file from request |
| members = features.members | members = features.members | ||||
| apps = features.apps | apps = features.apps | ||||
| vector_space = features.vector_space | vector_space = features.vector_space | ||||
| documents_upload_quota = features.documents_upload_quota | |||||
| annotation_quota_limit = features.annotation_quota_limit | annotation_quota_limit = features.annotation_quota_limit | ||||
| if resource == 'members' and 0 < members.limit <= members.size: | if resource == 'members' and 0 < members.limit <= members.size: | ||||
| abort(403, error_msg) | abort(403, error_msg) | ||||
| elif resource == 'vector_space' and 0 < vector_space.limit <= vector_space.size: | elif resource == 'vector_space' and 0 < vector_space.limit <= vector_space.size: | ||||
| abort(403, error_msg) | abort(403, error_msg) | ||||
| elif resource == 'documents' and 0 < documents_upload_quota.limit <= documents_upload_quota.size: | |||||
| # The api of file upload is used in the multiple places, so we need to check the source of the request from datasets | |||||
| source = request.args.get('source') | |||||
| if source == 'datasets': | |||||
| abort(403, error_msg) | |||||
| else: | |||||
| return view(*args, **kwargs) | |||||
| elif resource == 'workspace_custom' and not features.can_replace_logo: | elif resource == 'workspace_custom' and not features.can_replace_logo: | ||||
| abort(403, error_msg) | abort(403, error_msg) | ||||
| elif resource == 'annotation' and 0 < annotation_quota_limit.limit < annotation_quota_limit.size: | elif resource == 'annotation' and 0 < annotation_quota_limit.limit < annotation_quota_limit.size: |
| """Resource for documents.""" | """Resource for documents.""" | ||||
| @cloud_edition_billing_resource_check('vector_space', 'dataset') | @cloud_edition_billing_resource_check('vector_space', 'dataset') | ||||
| @cloud_edition_billing_resource_check('documents', 'dataset') | |||||
| def post(self, tenant_id, dataset_id): | def post(self, tenant_id, dataset_id): | ||||
| """Create document by text.""" | """Create document by text.""" | ||||
| parser = reqparse.RequestParser() | parser = reqparse.RequestParser() | ||||
| class DocumentAddByFileApi(DatasetApiResource): | class DocumentAddByFileApi(DatasetApiResource): | ||||
| """Resource for documents.""" | """Resource for documents.""" | ||||
| @cloud_edition_billing_resource_check('vector_space', 'dataset') | @cloud_edition_billing_resource_check('vector_space', 'dataset') | ||||
| @cloud_edition_billing_resource_check('documents', 'dataset') | |||||
| def post(self, tenant_id, dataset_id): | def post(self, tenant_id, dataset_id): | ||||
| """Create document by upload file.""" | """Create document by upload file.""" | ||||
| args = {} | args = {} |
| members = features.members | members = features.members | ||||
| apps = features.apps | apps = features.apps | ||||
| vector_space = features.vector_space | vector_space = features.vector_space | ||||
| documents_upload_quota = features.documents_upload_quota | |||||
| if resource == 'members' and 0 < members.limit <= members.size: | if resource == 'members' and 0 < members.limit <= members.size: | ||||
| raise Unauthorized(error_msg) | raise Unauthorized(error_msg) | ||||
| raise Unauthorized(error_msg) | raise Unauthorized(error_msg) | ||||
| elif resource == 'vector_space' and 0 < vector_space.limit <= vector_space.size: | elif resource == 'vector_space' and 0 < vector_space.limit <= vector_space.size: | ||||
| raise Unauthorized(error_msg) | raise Unauthorized(error_msg) | ||||
| elif resource == 'documents' and 0 < documents_upload_quota.limit <= documents_upload_quota.size: | |||||
| raise Unauthorized(error_msg) | |||||
| else: | else: | ||||
| return view(*args, **kwargs) | return view(*args, **kwargs) | ||||
| from services.errors.dataset import DatasetNameDuplicateError | from services.errors.dataset import DatasetNameDuplicateError | ||||
| from services.errors.document import DocumentIndexingError | from services.errors.document import DocumentIndexingError | ||||
| from services.errors.file import FileNotExistsError | from services.errors.file import FileNotExistsError | ||||
| from services.feature_service import FeatureService | |||||
| from services.feature_service import FeatureModel, FeatureService | |||||
| from services.vector_service import VectorService | from services.vector_service import VectorService | ||||
| from tasks.clean_notion_document_task import clean_notion_document_task | from tasks.clean_notion_document_task import clean_notion_document_task | ||||
| from tasks.deal_dataset_vector_index_task import deal_dataset_vector_index_task | from tasks.deal_dataset_vector_index_task import deal_dataset_vector_index_task | ||||
| batch_upload_limit = int(current_app.config['BATCH_UPLOAD_LIMIT']) | batch_upload_limit = int(current_app.config['BATCH_UPLOAD_LIMIT']) | ||||
| if count > batch_upload_limit: | if count > batch_upload_limit: | ||||
| raise ValueError(f"You have reached the batch upload limit of {batch_upload_limit}.") | raise ValueError(f"You have reached the batch upload limit of {batch_upload_limit}.") | ||||
| DocumentService.check_documents_upload_quota(count, features) | |||||
| # if dataset is empty, update dataset data_source_type | # if dataset is empty, update dataset data_source_type | ||||
| if not dataset.data_source_type: | if not dataset.data_source_type: | ||||
| dataset.data_source_type = document_data["data_source"]["type"] | dataset.data_source_type = document_data["data_source"]["type"] | ||||
| return documents, batch | return documents, batch | ||||
@staticmethod
def check_documents_upload_quota(count: int, features: FeatureModel):
    """Validate that `count` more documents fit within the tenant's upload quota.

    Args:
        count: Number of documents the caller is about to upload in this batch.
        features: Tenant feature/billing snapshot exposing
            ``documents_upload_quota`` with ``limit`` (total allowed) and
            ``size`` (already used) fields.

    Raises:
        ValueError: If uploading ``count`` documents would exceed the
            remaining quota.
    """
    quota = features.documents_upload_quota
    # A limit of 0 means "unlimited / quota not enforced" — consistent with
    # the `0 < documents_upload_quota.limit <= ...size` guards used by the
    # cloud-edition billing resource checks elsewhere in this change.
    if quota.limit <= 0:
        return
    can_upload_size = quota.limit - quota.size
    if count > can_upload_size:
        raise ValueError(f'You have reached the limit of your subscription. Only {can_upload_size} documents can be uploaded.')
| @staticmethod | @staticmethod | ||||
| def build_document(dataset: Dataset, process_rule_id: str, data_source_type: str, document_form: str, | def build_document(dataset: Dataset, process_rule_id: str, data_source_type: str, document_form: str, | ||||
| document_language: str, data_source_info: dict, created_from: str, position: int, | document_language: str, data_source_info: dict, created_from: str, position: int, | ||||
| if count > batch_upload_limit: | if count > batch_upload_limit: | ||||
| raise ValueError(f"You have reached the batch upload limit of {batch_upload_limit}.") | raise ValueError(f"You have reached the batch upload limit of {batch_upload_limit}.") | ||||
| DocumentService.check_documents_upload_quota(count, features) | |||||
| embedding_model = None | embedding_model = None | ||||
| dataset_collection_binding_id = None | dataset_collection_binding_id = None | ||||
| retrieval_model = None | retrieval_model = None |
| apps: LimitationModel = LimitationModel(size=0, limit=10) | apps: LimitationModel = LimitationModel(size=0, limit=10) | ||||
| vector_space: LimitationModel = LimitationModel(size=0, limit=5) | vector_space: LimitationModel = LimitationModel(size=0, limit=5) | ||||
| annotation_quota_limit: LimitationModel = LimitationModel(size=0, limit=10) | annotation_quota_limit: LimitationModel = LimitationModel(size=0, limit=10) | ||||
| documents_upload_quota: LimitationModel = LimitationModel(size=0, limit=50) | |||||
| docs_processing: str = 'standard' | docs_processing: str = 'standard' | ||||
| can_replace_logo: bool = False | can_replace_logo: bool = False | ||||
| features.vector_space.size = billing_info['vector_space']['size'] | features.vector_space.size = billing_info['vector_space']['size'] | ||||
| features.vector_space.limit = billing_info['vector_space']['limit'] | features.vector_space.limit = billing_info['vector_space']['limit'] | ||||
| features.documents_upload_quota.size = billing_info['documents_upload_quota']['size'] | |||||
| features.documents_upload_quota.limit = billing_info['documents_upload_quota']['limit'] | |||||
| features.annotation_quota_limit.size = billing_info['annotation_quota_limit']['size'] | features.annotation_quota_limit.size = billing_info['annotation_quota_limit']['size'] | ||||
| features.annotation_quota_limit.limit = billing_info['annotation_quota_limit']['limit'] | features.annotation_quota_limit.limit = billing_info['annotation_quota_limit']['limit'] | ||||
| teamMembers: 1, | teamMembers: 1, | ||||
| buildApps: 10, | buildApps: 10, | ||||
| vectorSpace: 5, | vectorSpace: 5, | ||||
| documentsUploadQuota: 50, | |||||
| documentProcessingPriority: Priority.standard, | documentProcessingPriority: Priority.standard, | ||||
| logHistory: 30, | logHistory: 30, | ||||
| customTools: unAvailable, | customTools: unAvailable, | ||||
| teamMembers: 3, | teamMembers: 3, | ||||
| buildApps: 50, | buildApps: 50, | ||||
| vectorSpace: 200, | vectorSpace: 200, | ||||
| documentsUploadQuota: 500, | |||||
| documentProcessingPriority: Priority.priority, | documentProcessingPriority: Priority.priority, | ||||
| logHistory: NUM_INFINITE, | logHistory: NUM_INFINITE, | ||||
| customTools: 10, | customTools: 10, | ||||
| teamMembers: NUM_INFINITE, | teamMembers: NUM_INFINITE, | ||||
| buildApps: NUM_INFINITE, | buildApps: NUM_INFINITE, | ||||
| vectorSpace: 1000, | vectorSpace: 1000, | ||||
| documentsUploadQuota: 1000, | |||||
| documentProcessingPriority: Priority.topPriority, | documentProcessingPriority: Priority.topPriority, | ||||
| logHistory: NUM_INFINITE, | logHistory: NUM_INFINITE, | ||||
| customTools: NUM_INFINITE, | customTools: NUM_INFINITE, | ||||
| teamMembers: NUM_INFINITE, | teamMembers: NUM_INFINITE, | ||||
| buildApps: NUM_INFINITE, | buildApps: NUM_INFINITE, | ||||
| vectorSpace: NUM_INFINITE, | vectorSpace: NUM_INFINITE, | ||||
| documentsUploadQuota: NUM_INFINITE, | |||||
| documentProcessingPriority: Priority.topPriority, | documentProcessingPriority: Priority.topPriority, | ||||
| logHistory: NUM_INFINITE, | logHistory: NUM_INFINITE, | ||||
| customTools: NUM_INFINITE, | customTools: NUM_INFINITE, |
| <div className='mt-3.5 flex items-center space-x-1'> | <div className='mt-3.5 flex items-center space-x-1'> | ||||
| <div>+ {t('billing.plansCommon.supportItems.logoChange')}</div> | <div>+ {t('billing.plansCommon.supportItems.logoChange')}</div> | ||||
| </div> | </div> | ||||
| <div className='mt-3.5 flex items-center space-x-1'> | |||||
| <div>+ {t('billing.plansCommon.supportItems.bulkUpload')}</div> | |||||
| </div> | |||||
| <div className='mt-3.5 flex items-center space-x-1'> | <div className='mt-3.5 flex items-center space-x-1'> | ||||
| <div className='flex items-center'> | <div className='flex items-center'> | ||||
| + | + | ||||
| value={planInfo.vectorSpace === NUM_INFINITE ? t('billing.plansCommon.unlimited') as string : (planInfo.vectorSpace >= 1000 ? `${planInfo.vectorSpace / 1000}G` : `${planInfo.vectorSpace}MB`)} | value={planInfo.vectorSpace === NUM_INFINITE ? t('billing.plansCommon.unlimited') as string : (planInfo.vectorSpace >= 1000 ? `${planInfo.vectorSpace / 1000}G` : `${planInfo.vectorSpace}MB`)} | ||||
| tooltip={t('billing.plansCommon.vectorSpaceBillingTooltip') as string} | tooltip={t('billing.plansCommon.vectorSpaceBillingTooltip') as string} | ||||
| /> | /> | ||||
| <KeyValue | |||||
| label={t('billing.plansCommon.documentsUploadQuota')} | |||||
| value={planInfo.vectorSpace === NUM_INFINITE ? t('billing.plansCommon.unlimited') as string : planInfo.documentsUploadQuota} | |||||
| /> | |||||
| <KeyValue | <KeyValue | ||||
| label={t('billing.plansCommon.documentProcessingPriority')} | label={t('billing.plansCommon.documentProcessingPriority')} | ||||
| value={t(`billing.plansCommon.priority.${planInfo.documentProcessingPriority}`) as string} | value={t(`billing.plansCommon.priority.${planInfo.documentProcessingPriority}`) as string} |
| teamMembers: number | teamMembers: number | ||||
| buildApps: number | buildApps: number | ||||
| vectorSpace: number | vectorSpace: number | ||||
| documentsUploadQuota: number | |||||
| documentProcessingPriority: Priority | documentProcessingPriority: Priority | ||||
| logHistory: number | logHistory: number | ||||
| customTools: string | number | customTools: string | number |
| onFileUpdate: (fileItem: FileItem, progress: number, list: FileItem[]) => void | onFileUpdate: (fileItem: FileItem, progress: number, list: FileItem[]) => void | ||||
| onFileListUpdate?: (files: FileItem[]) => void | onFileListUpdate?: (files: FileItem[]) => void | ||||
| onPreview: (file: File) => void | onPreview: (file: File) => void | ||||
| notSupportBatchUpload?: boolean | |||||
| } | } | ||||
| const FileUploader = ({ | const FileUploader = ({ | ||||
| onFileUpdate, | onFileUpdate, | ||||
| onFileListUpdate, | onFileListUpdate, | ||||
| onPreview, | onPreview, | ||||
| notSupportBatchUpload, | |||||
| }: IFileUploaderProps) => { | }: IFileUploaderProps) => { | ||||
| const { t } = useTranslation() | const { t } = useTranslation() | ||||
| const { notify } = useContext(ToastContext) | const { notify } = useContext(ToastContext) | ||||
| const dropRef = useRef<HTMLDivElement>(null) | const dropRef = useRef<HTMLDivElement>(null) | ||||
| const dragRef = useRef<HTMLDivElement>(null) | const dragRef = useRef<HTMLDivElement>(null) | ||||
| const fileUploader = useRef<HTMLInputElement>(null) | const fileUploader = useRef<HTMLInputElement>(null) | ||||
| const hideUpload = notSupportBatchUpload && fileList.length > 0 | |||||
| const { data: fileUploadConfigResponse } = useSWR({ url: '/files/upload' }, fetchFileUploadConfig) | const { data: fileUploadConfigResponse } = useSWR({ url: '/files/upload' }, fetchFileUploadConfig) | ||||
| const { data: supportFileTypesResponse } = useSWR({ url: '/files/support-type' }, fetchSupportFileTypes) | const { data: supportFileTypesResponse } = useSWR({ url: '/files/support-type' }, fetchSupportFileTypes) | ||||
| xhr: new XMLHttpRequest(), | xhr: new XMLHttpRequest(), | ||||
| data: formData, | data: formData, | ||||
| onprogress: onProgress, | onprogress: onProgress, | ||||
| }) | |||||
| }, false, undefined, '?source=datasets') | |||||
| .then((res: File) => { | .then((res: File) => { | ||||
| const completeFile = { | const completeFile = { | ||||
| fileID: fileItem.fileID, | fileID: fileItem.fileID, | ||||
| onFileUpdate(completeFile, 100, fileListCopy) | onFileUpdate(completeFile, 100, fileListCopy) | ||||
| return Promise.resolve({ ...completeFile }) | return Promise.resolve({ ...completeFile }) | ||||
| }) | }) | ||||
| .catch(() => { | |||||
| notify({ type: 'error', message: t('datasetCreation.stepOne.uploader.failed') }) | |||||
| .catch((e) => { | |||||
| notify({ type: 'error', message: e?.response?.code === 'forbidden' ? e?.response?.message : t('datasetCreation.stepOne.uploader.failed') }) | |||||
| onFileUpdate(fileItem, -2, fileListCopy) | onFileUpdate(fileItem, -2, fileListCopy) | ||||
| return Promise.resolve({ ...fileItem }) | return Promise.resolve({ ...fileItem }) | ||||
| }) | }) | ||||
| return ( | return ( | ||||
| <div className={s.fileUploader}> | <div className={s.fileUploader}> | ||||
| <input | |||||
| ref={fileUploader} | |||||
| id="fileUploader" | |||||
| style={{ display: 'none' }} | |||||
| type="file" | |||||
| multiple | |||||
| accept={ACCEPTS.join(',')} | |||||
| onChange={fileChangeHandle} | |||||
| /> | |||||
| {!hideUpload && ( | |||||
| <input | |||||
| ref={fileUploader} | |||||
| id="fileUploader" | |||||
| style={{ display: 'none' }} | |||||
| type="file" | |||||
| multiple={!notSupportBatchUpload} | |||||
| accept={ACCEPTS.join(',')} | |||||
| onChange={fileChangeHandle} | |||||
| /> | |||||
| )} | |||||
| <div className={cn(s.title, titleClassName)}>{t('datasetCreation.stepOne.uploader.title')}</div> | <div className={cn(s.title, titleClassName)}>{t('datasetCreation.stepOne.uploader.title')}</div> | ||||
| <div ref={dropRef} className={cn(s.uploader, dragging && s.dragging)}> | |||||
| <div className='flex justify-center items-center min-h-6 mb-2'> | |||||
| <span className={s.uploadIcon} /> | |||||
| <span> | |||||
| {t('datasetCreation.stepOne.uploader.button')} | |||||
| <label className={s.browse} onClick={selectHandle}>{t('datasetCreation.stepOne.uploader.browse')}</label> | |||||
| </span> | |||||
| {!hideUpload && ( | |||||
| <div ref={dropRef} className={cn(s.uploader, dragging && s.dragging)}> | |||||
| <div className='flex justify-center items-center min-h-6 mb-2'> | |||||
| <span className={s.uploadIcon} /> | |||||
| <span> | |||||
| {t('datasetCreation.stepOne.uploader.button')} | |||||
| <label className={s.browse} onClick={selectHandle}>{t('datasetCreation.stepOne.uploader.browse')}</label> | |||||
| </span> | |||||
| </div> | |||||
| <div className={s.tip}>{t('datasetCreation.stepOne.uploader.tip', { | |||||
| size: fileUploadConfig.file_size_limit, | |||||
| supportTypes: supportTypesShowNames, | |||||
| })}</div> | |||||
| {dragging && <div ref={dragRef} className={s.draggingCover} />} | |||||
| </div> | </div> | ||||
| <div className={s.tip}>{t('datasetCreation.stepOne.uploader.tip', { | |||||
| size: fileUploadConfig.file_size_limit, | |||||
| supportTypes: supportTypesShowNames, | |||||
| })}</div> | |||||
| {dragging && <div ref={dragRef} className={s.draggingCover} />} | |||||
| </div> | |||||
| )} | |||||
| <div className={s.fileList}> | <div className={s.fileList}> | ||||
| {fileList.map((fileItem, index) => ( | {fileList.map((fileItem, index) => ( | ||||
| <div | <div |
| return ( | return ( | ||||
| <div className={s.notionConnectionTip}> | <div className={s.notionConnectionTip}> | ||||
| <span className={s.notionIcon}/> | |||||
| <span className={s.notionIcon} /> | |||||
| <div className={s.title}>{t('datasetCreation.stepOne.notionSyncTitle')}</div> | <div className={s.title}>{t('datasetCreation.stepOne.notionSyncTitle')}</div> | ||||
| <div className={s.tip}>{t('datasetCreation.stepOne.notionSyncTip')}</div> | <div className={s.tip}>{t('datasetCreation.stepOne.notionSyncTip')}</div> | ||||
| <Button className='h-8' type='primary' onClick={onSetting}>{t('datasetCreation.stepOne.connect')}</Button> | <Button className='h-8' type='primary' onClick={onSetting}>{t('datasetCreation.stepOne.connect')}</Button> | ||||
| const hasNotin = notionPages.length > 0 | const hasNotin = notionPages.length > 0 | ||||
| const isVectorSpaceFull = plan.usage.vectorSpace >= plan.total.vectorSpace | const isVectorSpaceFull = plan.usage.vectorSpace >= plan.total.vectorSpace | ||||
| const isShowVectorSpaceFull = (allFileLoaded || hasNotin) && isVectorSpaceFull && enableBilling | const isShowVectorSpaceFull = (allFileLoaded || hasNotin) && isVectorSpaceFull && enableBilling | ||||
| const notSupportBatchUpload = enableBilling && plan.type === 'sandbox' | |||||
| const nextDisabled = useMemo(() => { | const nextDisabled = useMemo(() => { | ||||
| if (!files.length) | if (!files.length) | ||||
| return true | return true | ||||
| onFileListUpdate={updateFileList} | onFileListUpdate={updateFileList} | ||||
| onFileUpdate={updateFile} | onFileUpdate={updateFile} | ||||
| onPreview={updateCurrentFile} | onPreview={updateCurrentFile} | ||||
| notSupportBatchUpload={notSupportBatchUpload} | |||||
| /> | /> | ||||
| {isShowVectorSpaceFull && ( | {isShowVectorSpaceFull && ( | ||||
| <div className='max-w-[640px] mb-4'> | <div className='max-w-[640px] mb-4'> |
| vectorSpace: 'Vector Space', | vectorSpace: 'Vector Space', | ||||
| vectorSpaceBillingTooltip: 'Each 1MB can store about 1.2million characters of vectorized data(estimated using OpenAI Embeddings, varies across models).', | vectorSpaceBillingTooltip: 'Each 1MB can store about 1.2million characters of vectorized data(estimated using OpenAI Embeddings, varies across models).', | ||||
| vectorSpaceTooltip: 'Vector Space is the long-term memory system required for LLMs to comprehend your data.', | vectorSpaceTooltip: 'Vector Space is the long-term memory system required for LLMs to comprehend your data.', | ||||
| documentsUploadQuota: 'Documents Upload Quota', | |||||
| documentProcessingPriority: 'Document Processing Priority', | documentProcessingPriority: 'Document Processing Priority', | ||||
| documentProcessingPriorityTip: 'For higher document processing priority, please upgrade your plan.', | documentProcessingPriorityTip: 'For higher document processing priority, please upgrade your plan.', | ||||
| documentProcessingPriorityUpgrade: 'Process more data with higher accuracy at faster speeds.', | documentProcessingPriorityUpgrade: 'Process more data with higher accuracy at faster speeds.', | ||||
| dedicatedAPISupport: 'Dedicated API support', | dedicatedAPISupport: 'Dedicated API support', | ||||
| customIntegration: 'Custom integration and support', | customIntegration: 'Custom integration and support', | ||||
| ragAPIRequest: 'RAG API Requests', | ragAPIRequest: 'RAG API Requests', | ||||
| bulkUpload: 'Bulk upload documents', | |||||
| agentMode: 'Agent Mode', | agentMode: 'Agent Mode', | ||||
| workflow: 'Workflow', | workflow: 'Workflow', | ||||
| }, | }, |
| vectorSpace: '向量空间', | vectorSpace: '向量空间', | ||||
| vectorSpaceTooltip: '向量空间是 LLMs 理解您的数据所需的长期记忆系统。', | vectorSpaceTooltip: '向量空间是 LLMs 理解您的数据所需的长期记忆系统。', | ||||
| vectorSpaceBillingTooltip: '向量存储是将知识库向量化处理后为让 LLMs 理解数据而使用的长期记忆存储,1MB 大约能满足1.2 million character 的向量化后数据存储(以 OpenAI Embedding 模型估算,不同模型计算方式有差异)。在向量化过程中,实际的压缩或尺寸减小取决于内容的复杂性和冗余性。', | vectorSpaceBillingTooltip: '向量存储是将知识库向量化处理后为让 LLMs 理解数据而使用的长期记忆存储,1MB 大约能满足1.2 million character 的向量化后数据存储(以 OpenAI Embedding 模型估算,不同模型计算方式有差异)。在向量化过程中,实际的压缩或尺寸减小取决于内容的复杂性和冗余性。', | ||||
| documentsUploadQuota: '文档上传配额', | |||||
| documentProcessingPriority: '文档处理优先级', | documentProcessingPriority: '文档处理优先级', | ||||
| documentProcessingPriorityTip: '如需更高的文档处理优先级,请升级您的套餐', | documentProcessingPriorityTip: '如需更高的文档处理优先级,请升级您的套餐', | ||||
| documentProcessingPriorityUpgrade: '以更快的速度、更高的精度处理更多的数据。', | documentProcessingPriorityUpgrade: '以更快的速度、更高的精度处理更多的数据。', | ||||
| dedicatedAPISupport: '专用 API 支持', | dedicatedAPISupport: '专用 API 支持', | ||||
| customIntegration: '自定义集成和支持', | customIntegration: '自定义集成和支持', | ||||
| ragAPIRequest: 'RAG API 请求', | ragAPIRequest: 'RAG API 请求', | ||||
| bulkUpload: '批量上传文档', | |||||
| agentMode: '代理模式', | agentMode: '代理模式', | ||||
| workflow: '工作流', | workflow: '工作流', | ||||
| }, | }, |
| ]) as Promise<T> | ]) as Promise<T> | ||||
| } | } | ||||
| export const upload = (options: any, isPublicAPI?: boolean, url?: string): Promise<any> => { | |||||
| export const upload = (options: any, isPublicAPI?: boolean, url?: string, searchParams?: string): Promise<any> => { | |||||
| const urlPrefix = isPublicAPI ? PUBLIC_API_PREFIX : API_PREFIX | const urlPrefix = isPublicAPI ? PUBLIC_API_PREFIX : API_PREFIX | ||||
| let token = '' | let token = '' | ||||
| if (isPublicAPI) { | if (isPublicAPI) { | ||||
| } | } | ||||
| const defaultOptions = { | const defaultOptions = { | ||||
| method: 'POST', | method: 'POST', | ||||
| url: url ? `${urlPrefix}${url}` : `${urlPrefix}/files/upload`, | |||||
| url: (url ? `${urlPrefix}${url}` : `${urlPrefix}/files/upload`) + (searchParams || ''), | |||||
| headers: { | headers: { | ||||
| Authorization: `Bearer ${token}`, | Authorization: `Bearer ${token}`, | ||||
| }, | }, |