### What problem does this PR solve? ### Type of change - [x] Documentation Update (tags/v0.4.0)
| ## 📌 Latest Features | ## 📌 Latest Features | ||||
| - 2024-04-26 Add file management. | |||||
| - 2024-04-19 Support conversation API ([detail](./docs/conversation_api.md)). | - 2024-04-19 Support conversation API ([detail](./docs/conversation_api.md)). | ||||
| - 2024-04-16 Add an embedding model 'bce-embedding-base_v1' from [BCEmbedding](https://github.com/netease-youdao/BCEmbedding). | - 2024-04-16 Add an embedding model 'bce-embedding-base_v1' from [BCEmbedding](https://github.com/netease-youdao/BCEmbedding). | ||||
| - 2024-04-16 Add [FastEmbed](https://github.com/qdrant/fastembed), which is designed specifically for light and speedy embedding. | - 2024-04-16 Add [FastEmbed](https://github.com/qdrant/fastembed), which is designed specifically for light and speedy embedding. |
| ## 📌 最新の機能 | ## 📌 最新の機能 | ||||
| - 2024-04-26 「ファイル管理」機能を追加しました。 | |||||
| - 2024-04-19 会話 API をサポートします ([詳細](./docs/conversation_api.md))。 | - 2024-04-19 会話 API をサポートします ([詳細](./docs/conversation_api.md))。 | ||||
| - 2024-04-16 [BCEmbedding](https://github.com/netease-youdao/BCEmbedding) から埋め込みモデル「bce-embedding-base_v1」を追加します。 | - 2024-04-16 [BCEmbedding](https://github.com/netease-youdao/BCEmbedding) から埋め込みモデル「bce-embedding-base_v1」を追加します。 | ||||
| - 2024-04-16 [FastEmbed](https://github.com/qdrant/fastembed) は、軽量かつ高速な埋め込み用に設計されています。 | - 2024-04-16 [FastEmbed](https://github.com/qdrant/fastembed) は、軽量かつ高速な埋め込み用に設計されています。 |
| ## 📌 新增功能 | ## 📌 新增功能 | ||||
| - 2024-04-26 增添了“文件管理”功能。 | |||||
| - 2024-04-19 支持对话 API ([更多](./docs/conversation_api.md)). | - 2024-04-19 支持对话 API ([更多](./docs/conversation_api.md)). | ||||
| - 2024-04-16 添加嵌入模型 [BCEmbedding](https://github.com/netease-youdao/BCEmbedding) 。 | - 2024-04-16 添加嵌入模型 [BCEmbedding](https://github.com/netease-youdao/BCEmbedding) 。 | ||||
| - 2024-04-16 添加 [FastEmbed](https://github.com/qdrant/fastembed) 专为轻型和高速嵌入而设计。 | - 2024-04-16 添加 [FastEmbed](https://github.com/qdrant/fastembed) 专为轻型和高速嵌入而设计。 |
| from elasticsearch_dsl import Q | from elasticsearch_dsl import Q | ||||
| from flask import request | from flask import request | ||||
| from flask_login import login_required, current_user | from flask_login import login_required, current_user | ||||
| from api.db.services.file2document_service import File2DocumentService | |||||
| from api.db.services.file_service import FileService | |||||
| from rag.nlp import search | from rag.nlp import search | ||||
| from rag.utils import ELASTICSEARCH | from rag.utils import ELASTICSEARCH | ||||
| from api.db.services import duplicate_name | from api.db.services import duplicate_name | ||||
| name=file.filename, | name=file.filename, | ||||
| kb_id=kb.id) | kb_id=kb.id) | ||||
| filetype = filename_type(filename) | filetype = filename_type(filename) | ||||
| if not filetype: | |||||
| if filetype == FileType.OTHER.value: | |||||
| return get_data_error_result( | return get_data_error_result( | ||||
| retmsg="This type of file has not been supported yet!") | retmsg="This type of file has not been supported yet!") | ||||
| @validate_request("doc_id") | @validate_request("doc_id") | ||||
| def rm(): | def rm(): | ||||
| req = request.json | req = request.json | ||||
| try: | |||||
| e, doc = DocumentService.get_by_id(req["doc_id"]) | |||||
| if not e: | |||||
| return get_data_error_result(retmsg="Document not found!") | |||||
| tenant_id = DocumentService.get_tenant_id(req["doc_id"]) | |||||
| if not tenant_id: | |||||
| return get_data_error_result(retmsg="Tenant not found!") | |||||
| ELASTICSEARCH.deleteByQuery( | |||||
| Q("match", doc_id=doc.id), idxnm=search.index_name(tenant_id)) | |||||
| DocumentService.increment_chunk_num( | |||||
| doc.id, doc.kb_id, doc.token_num * -1, doc.chunk_num * -1, 0) | |||||
| if not DocumentService.delete(doc): | |||||
| return get_data_error_result( | |||||
| retmsg="Database error (Document removal)!") | |||||
| doc_ids = req["doc_id"] | |||||
| if isinstance(doc_ids, str): doc_ids = [doc_ids] | |||||
| errors = "" | |||||
| for doc_id in doc_ids: | |||||
| try: | |||||
| e, doc = DocumentService.get_by_id(doc_id) | |||||
| if not e: | |||||
| return get_data_error_result(retmsg="Document not found!") | |||||
| tenant_id = DocumentService.get_tenant_id(doc_id) | |||||
| if not tenant_id: | |||||
| return get_data_error_result(retmsg="Tenant not found!") | |||||
| ELASTICSEARCH.deleteByQuery( | |||||
| Q("match", doc_id=doc.id), idxnm=search.index_name(tenant_id)) | |||||
| DocumentService.increment_chunk_num( | |||||
| doc.id, doc.kb_id, doc.token_num * -1, doc.chunk_num * -1, 0) | |||||
| if not DocumentService.delete(doc): | |||||
| return get_data_error_result( | |||||
| retmsg="Database error (Document removal)!") | |||||
| informs = File2DocumentService.get_by_document_id(doc_id) | |||||
| if not informs: | |||||
| MINIO.rm(doc.kb_id, doc.location) | |||||
| else: | |||||
| File2DocumentService.delete_by_document_id(doc_id) | |||||
| except Exception as e: | |||||
| errors += str(e) | |||||
| if errors: return server_error_response(e) | |||||
| return get_json_result(data=True) | |||||
| MINIO.rm(doc.kb_id, doc.location) | |||||
| return get_json_result(data=True) | |||||
| except Exception as e: | |||||
| return server_error_response(e) | |||||
| @manager.route('/run', methods=['POST']) | @manager.route('/run', methods=['POST']) | ||||
| if not e: | if not e: | ||||
| return get_data_error_result(retmsg="Document not found!") | return get_data_error_result(retmsg="Document not found!") | ||||
| response = flask.make_response(MINIO.get(doc.kb_id, doc.location)) | |||||
| informs = File2DocumentService.get_by_document_id(doc_id) | |||||
| if not informs: | |||||
| response = flask.make_response(MINIO.get(doc.kb_id, doc.location)) | |||||
| else: | |||||
| e, file = FileService.get_by_id(informs[0].file_id) | |||||
| response = flask.make_response(MINIO.get(file.parent_id, doc.location)) | |||||
| ext = re.search(r"\.([^.]+)$", doc.name) | ext = re.search(r"\.([^.]+)$", doc.name) | ||||
| if ext: | if ext: | ||||
| if doc.type == FileType.VISUAL.value: | if doc.type == FileType.VISUAL.value: |
| # | |||||
| # Copyright 2024 The InfiniFlow Authors. All Rights Reserved. | |||||
| # | |||||
| # Licensed under the Apache License, Version 2.0 (the "License"); | |||||
| # you may not use this file except in compliance with the License. | |||||
| # You may obtain a copy of the License at | |||||
| # | |||||
| # http://www.apache.org/licenses/LICENSE-2.0 | |||||
| # | |||||
| # Unless required by applicable law or agreed to in writing, software | |||||
| # distributed under the License is distributed on an "AS IS" BASIS, | |||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
| # See the License for the specific language governing permissions and | |||||
| # limitations under the License | |||||
| # | |||||
| from elasticsearch_dsl import Q | |||||
| from api.db.db_models import File2Document | |||||
| from api.db.services.file2document_service import File2DocumentService | |||||
| from api.db.services.file_service import FileService | |||||
| from flask import request | |||||
| from flask_login import login_required, current_user | |||||
| from api.db.services.knowledgebase_service import KnowledgebaseService | |||||
| from api.utils.api_utils import server_error_response, get_data_error_result, validate_request | |||||
| from api.utils import get_uuid | |||||
| from api.db import FileType | |||||
| from api.db.services.document_service import DocumentService | |||||
| from api.settings import RetCode | |||||
| from api.utils.api_utils import get_json_result | |||||
| from rag.nlp import search | |||||
| from rag.utils import ELASTICSEARCH | |||||
@manager.route('/convert', methods=['POST'])
@login_required
@validate_request("file_ids", "kb_ids")
def convert():
    """Link files into knowledgebases.

    For every file (folders are expanded to their innermost files) and every
    target knowledgebase: drop any existing document bindings for the file,
    then insert a fresh Document row plus a File2Document mapping.

    Request JSON: {"file_ids": [...], "kb_ids": [...]}.
    Returns: JSON list of the created file2document rows, or an error result.
    """
    req = request.json
    kb_ids = req["kb_ids"]
    file_ids = req["file_ids"]
    file2documents = []
    try:
        for file_id in file_ids:
            e, file = FileService.get_by_id(file_id)
            file_ids_list = [file_id]
            if file.type == FileType.FOLDER:
                # A folder is replaced by the ids of all its leaf files.
                file_ids_list = FileService.get_all_innermost_file_ids(file_id, [])
            for id in file_ids_list:  # NOTE: `id` shadows the builtin
                informs = File2DocumentService.get_by_file_id(id)
                # delete: purge previously linked documents — their chunks in
                # Elasticsearch, the kb token/chunk counters, and the doc rows.
                for inform in informs:
                    doc_id = inform.document_id
                    e, doc = DocumentService.get_by_id(doc_id)
                    if not e:
                        return get_data_error_result(retmsg="Document not found!")
                    tenant_id = DocumentService.get_tenant_id(doc_id)
                    if not tenant_id:
                        return get_data_error_result(retmsg="Tenant not found!")
                    # Remove this document's chunks from the tenant's index.
                    ELASTICSEARCH.deleteByQuery(
                        Q("match", doc_id=doc.id), idxnm=search.index_name(tenant_id))
                    # Roll back the knowledgebase counters by the doc's totals.
                    DocumentService.increment_chunk_num(
                        doc.id, doc.kb_id, doc.token_num * -1, doc.chunk_num * -1, 0)
                    if not DocumentService.delete(doc):
                        return get_data_error_result(
                            retmsg="Database error (Document removal)!")
                File2DocumentService.delete_by_file_id(id)
                # insert: one new Document + mapping per target knowledgebase.
                for kb_id in kb_ids:
                    e, kb = KnowledgebaseService.get_by_id(kb_id)
                    if not e:
                        return get_data_error_result(
                            retmsg="Can't find this knowledgebase!")
                    e, file = FileService.get_by_id(id)
                    if not e:
                        return get_data_error_result(
                            retmsg="Can't find this file!")
                    doc = DocumentService.insert({
                        "id": get_uuid(),
                        "kb_id": kb.id,
                        "parser_id": kb.parser_id,
                        "parser_config": kb.parser_config,
                        "created_by": current_user.id,
                        "type": file.type,
                        "name": file.name,
                        "location": file.location,
                        "size": file.size
                    })
                    file2document = File2DocumentService.insert({
                        "id": get_uuid(),
                        "file_id": id,
                        "document_id": doc.id,
                    })
                    file2documents.append(file2document.to_json())
        return get_json_result(data=file2documents)
    except Exception as e:
        return server_error_response(e)
@manager.route('/rm', methods=['POST'])
@login_required
@validate_request("file_ids")
def rm():
    """Unlink files from knowledgebases and delete the generated documents.

    For every mapping row of every file id: remove the document's chunks from
    Elasticsearch, roll back the kb counters, and delete the Document row.

    Request JSON: {"file_ids": [...]}. Returns JSON true, or an error result.
    """
    req = request.json
    file_ids = req["file_ids"]
    if not file_ids:
        return get_json_result(
            data=False, retmsg='Lack of "Files ID"', retcode=RetCode.ARGUMENT_ERROR)
    try:
        for file_id in file_ids:
            informs = File2DocumentService.get_by_file_id(file_id)
            if not informs:
                return get_data_error_result(retmsg="Inform not found!")
            for inform in informs:
                if not inform:
                    return get_data_error_result(retmsg="Inform not found!")
                # NOTE(review): this deletes ALL mappings for the file on the
                # first iteration, yet the loop keeps using the rows already
                # fetched into `informs` — confirm this ordering is intended.
                File2DocumentService.delete_by_file_id(file_id)
                doc_id = inform.document_id
                e, doc = DocumentService.get_by_id(doc_id)
                if not e:
                    return get_data_error_result(retmsg="Document not found!")
                tenant_id = DocumentService.get_tenant_id(doc_id)
                if not tenant_id:
                    return get_data_error_result(retmsg="Tenant not found!")
                # Purge the document's chunks from the tenant's index.
                ELASTICSEARCH.deleteByQuery(
                    Q("match", doc_id=doc.id), idxnm=search.index_name(tenant_id))
                # Roll back kb token/chunk counters by the doc's totals.
                DocumentService.increment_chunk_num(
                    doc.id, doc.kb_id, doc.token_num * -1, doc.chunk_num * -1, 0)
                if not DocumentService.delete(doc):
                    return get_data_error_result(
                        retmsg="Database error (Document removal)!")
        return get_json_result(data=True)
    except Exception as e:
        return server_error_response(e)
| # | |||||
| # Copyright 2024 The InfiniFlow Authors. All Rights Reserved. | |||||
| # | |||||
| # Licensed under the Apache License, Version 2.0 (the "License"); | |||||
| # you may not use this file except in compliance with the License. | |||||
| # You may obtain a copy of the License at | |||||
| # | |||||
| # http://www.apache.org/licenses/LICENSE-2.0 | |||||
| # | |||||
| # Unless required by applicable law or agreed to in writing, software | |||||
| # distributed under the License is distributed on an "AS IS" BASIS, | |||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
| # See the License for the specific language governing permissions and | |||||
| # limitations under the License | |||||
| # | |||||
| import os | |||||
| import pathlib | |||||
| import re | |||||
| import flask | |||||
| from elasticsearch_dsl import Q | |||||
| from flask import request | |||||
| from flask_login import login_required, current_user | |||||
| from api.db.services.document_service import DocumentService | |||||
| from api.db.services.file2document_service import File2DocumentService | |||||
| from api.utils.api_utils import server_error_response, get_data_error_result, validate_request | |||||
| from api.utils import get_uuid | |||||
| from api.db import FileType | |||||
| from api.db.services import duplicate_name | |||||
| from api.db.services.file_service import FileService | |||||
| from api.settings import RetCode | |||||
| from api.utils.api_utils import get_json_result | |||||
| from api.utils.file_utils import filename_type | |||||
| from rag.nlp import search | |||||
| from rag.utils import ELASTICSEARCH | |||||
| from rag.utils.minio_conn import MINIO | |||||
@manager.route('/upload', methods=['POST'])
@login_required
# @validate_request("parent_id")
def upload():
    """Upload one or more files into the file-management tree.

    Form fields: optional ``parent_id`` (defaults to the current user's root
    folder) and one or more ``file`` parts. Any folder path embedded in the
    uploaded filename is recreated under the parent, the bytes are stored in
    MinIO keyed by the destination folder id, and a File row is inserted per
    upload.

    Returns: JSON list of the created File rows, or an error result.
    """
    pf_id = request.form.get("parent_id")
    if not pf_id:
        root_folder = FileService.get_root_folder(current_user.id)
        pf_id = root_folder.id
    if 'file' not in request.files:
        return get_json_result(
            data=False, retmsg='No file part!', retcode=RetCode.ARGUMENT_ERROR)
    file_objs = request.files.getlist('file')
    for file_obj in file_objs:
        if file_obj.filename == '':
            return get_json_result(
                data=False, retmsg='No file selected!', retcode=RetCode.ARGUMENT_ERROR)
    file_res = []
    try:
        for file_obj in file_objs:
            e, file = FileService.get_by_id(pf_id)
            if not e:
                return get_data_error_result(
                    retmsg="Can't find this folder!")
            MAX_FILE_NUM_PER_USER = int(os.environ.get('MAX_FILE_NUM_PER_USER', 0))
            # Bug fix: the original read `kb.tenant_id`, but no `kb` variable
            # exists in this endpoint (copied from the document-upload handler),
            # so enabling the quota raised NameError. Files here belong to the
            # current user's tenant, so count that tenant's documents instead.
            if MAX_FILE_NUM_PER_USER > 0 and DocumentService.get_doc_count(
                    current_user.id) >= MAX_FILE_NUM_PER_USER:
                return get_data_error_result(
                    retmsg="Exceed the maximum file number of a free user!")
            # Split the (possibly path-like) upload name into components.
            if not file_obj.filename:
                e, file = FileService.get_by_id(pf_id)
                file_obj_names = [file.name, file_obj.filename]
            else:
                full_path = '/' + file_obj.filename
                file_obj_names = full_path.split('/')
            file_len = len(file_obj_names)
            # Resolve how much of the embedded path already exists as folders.
            file_id_list = FileService.get_id_list_by_id(pf_id, file_obj_names, 1, [pf_id])
            len_id_list = len(file_id_list)
            # Create any missing folders along the path; `last_folder` is the
            # destination the blob and File row are attached to.
            if file_len != len_id_list:
                e, file = FileService.get_by_id(file_id_list[len_id_list - 1])
                if not e:
                    return get_data_error_result(retmsg="Folder not found!")
                last_folder = FileService.create_folder(file, file_id_list[len_id_list - 1], file_obj_names,
                                                        len_id_list)
            else:
                e, file = FileService.get_by_id(file_id_list[len_id_list - 2])
                if not e:
                    return get_data_error_result(retmsg="Folder not found!")
                last_folder = FileService.create_folder(file, file_id_list[len_id_list - 2], file_obj_names,
                                                        len_id_list)
            # Classify by extension and pick a non-colliding MinIO object key.
            filetype = filename_type(file_obj_names[file_len - 1])
            location = file_obj_names[file_len - 1]
            while MINIO.obj_exist(last_folder.id, location):
                location += "_"
            blob = file_obj.read()
            # De-duplicate the visible name within the destination folder.
            filename = duplicate_name(
                FileService.query,
                name=file_obj_names[file_len - 1],
                parent_id=last_folder.id)
            file = {
                "id": get_uuid(),
                "parent_id": last_folder.id,
                "tenant_id": current_user.id,
                "created_by": current_user.id,
                "type": filetype,
                "name": filename,
                "location": location,
                "size": len(blob),
            }
            file = FileService.insert(file)
            MINIO.put(last_folder.id, location, blob)
            file_res.append(file.to_json())
        return get_json_result(data=file_res)
    except Exception as e:
        return server_error_response(e)
@manager.route('/create', methods=['POST'])
@login_required
@validate_request("name")
def create():
    """Create a folder (or virtual file) under the given parent folder.

    Request JSON: {"name": ..., "parent_id": optional, "type": optional};
    the parent defaults to the current user's root folder.
    Returns: the created File row as JSON, or an error result.
    """
    req = request.json
    pf_id = request.json.get("parent_id")
    input_file_type = request.json.get("type")
    if not pf_id:
        pf_id = FileService.get_root_folder(current_user.id).id
    try:
        if not FileService.is_parent_folder_exist(pf_id):
            return get_json_result(
                data=False, retmsg="Parent Folder Doesn't Exist!", retcode=RetCode.OPERATING_ERROR)
        if FileService.query(name=req["name"], parent_id=pf_id):
            return get_data_error_result(
                retmsg="Duplicated folder name in the same folder.")
        # Anything not explicitly a folder is stored as a virtual entry.
        file_type = (FileType.FOLDER
                     if input_file_type == FileType.FOLDER.value
                     else FileType.VIRTUAL)
        record = {
            "id": get_uuid(),
            "parent_id": pf_id,
            "tenant_id": current_user.id,
            "created_by": current_user.id,
            "name": req["name"],
            "location": "",
            "size": 0,
            "type": file_type
        }
        created = FileService.insert(record)
        return get_json_result(data=created.to_json())
    except Exception as e:
        return server_error_response(e)
@manager.route('/list', methods=['GET'])
@login_required
def list():
    """List files under a folder, paged and optionally keyword-filtered.

    Query args: parent_id (defaults to the user's root folder), keywords,
    page, page_size, orderby, desc.
    Returns: JSON with total count, file rows, and the parent folder.
    """
    pf_id = request.args.get("parent_id")
    keywords = request.args.get("keywords", "")
    page_number = int(request.args.get("page", 1))
    items_per_page = int(request.args.get("page_size", 15))
    orderby = request.args.get("orderby", "create_time")
    # NOTE(review): without a query arg this is the boolean True, but with one
    # it is a string — callers downstream must accept both. Preserved as-is.
    desc = request.args.get("desc", True)
    if not pf_id:
        root_folder = FileService.get_root_folder(current_user.id)
        pf_id = root_folder.id
    try:
        e, file = FileService.get_by_id(pf_id)
        if not e:
            return get_data_error_result(retmsg="Folder not found!")
        files, total = FileService.get_by_pf_id(
            current_user.id, pf_id, page_number, items_per_page, orderby, desc, keywords)
        parent_folder = FileService.get_parent_folder(pf_id)
        # Fix: reuse the row fetched above — the original issued the exact
        # same get_parent_folder query a second time just for this check.
        if not parent_folder:
            return get_json_result(retmsg="File not found!")
        return get_json_result(data={"total": total, "files": files, "parent_folder": parent_folder.to_json()})
    except Exception as e:
        return server_error_response(e)
@manager.route('/root_folder', methods=['GET'])
@login_required
def get_root_folder():
    """Return the current user's root folder as JSON."""
    try:
        folder = FileService.get_root_folder(current_user.id)
        return get_json_result(data={"root_folder": folder.to_json()})
    except Exception as e:
        return server_error_response(e)
@manager.route('/parent_folder', methods=['GET'])
@login_required
def get_parent_folder():
    """Return the immediate parent folder of the file given by ?file_id=."""
    file_id = request.args.get("file_id")
    try:
        found, _ = FileService.get_by_id(file_id)
        if not found:
            return get_data_error_result(retmsg="Folder not found!")
        parent = FileService.get_parent_folder(file_id)
        return get_json_result(data={"parent_folder": parent.to_json()})
    except Exception as e:
        return server_error_response(e)
@manager.route('/all_parent_folder', methods=['GET'])
@login_required
def get_all_parent_folders():
    """Return every ancestor folder of the file given by ?file_id=."""
    file_id = request.args.get("file_id")
    try:
        found, _ = FileService.get_by_id(file_id)
        if not found:
            return get_data_error_result(retmsg="Folder not found!")
        ancestors = FileService.get_all_parent_folders(file_id)
        payload = [folder.to_json() for folder in ancestors]
        return get_json_result(data={"parent_folders": payload})
    except Exception as e:
        return server_error_response(e)
@manager.route('/rm', methods=['POST'])
@login_required
@validate_request("file_ids")
def rm():
    """Delete files or folders, plus any documents generated from them.

    Folder: every innermost file's blob is removed from MinIO and the subtree
    is deleted. Plain file: the File row is deleted; then, for both cases,
    every linked document is purged from Elasticsearch, its kb counters
    rolled back, the Document row removed, and the mappings deleted.

    Request JSON: {"file_ids": [...]}. Returns JSON true, or an error result.
    """
    req = request.json
    file_ids = req["file_ids"]
    try:
        for file_id in file_ids:
            e, file = FileService.get_by_id(file_id)
            if not e:
                return get_data_error_result(retmsg="File or Folder not found!")
            if not file.tenant_id:
                return get_data_error_result(retmsg="Tenant not found!")
            if file.type == FileType.FOLDER:
                file_id_list = FileService.get_all_innermost_file_ids(file_id, [])
                for inner_file_id in file_id_list:
                    e, file = FileService.get_by_id(inner_file_id)
                    if not e:
                        return get_data_error_result(retmsg="File not found!")
                    # Drop the stored blob before the DB rows disappear.
                    MINIO.rm(file.parent_id, file.location)
                FileService.delete_folder_by_pf_id(current_user.id, file_id)
            else:
                # NOTE(review): unlike the folder branch, a plain file's MinIO
                # blob is NOT removed here — confirm whether that is intended.
                if not FileService.delete(file):
                    return get_data_error_result(
                        retmsg="Database error (File removal)!")
            # delete file2document: purge documents generated from this file.
            informs = File2DocumentService.get_by_file_id(file_id)
            for inform in informs:
                doc_id = inform.document_id
                e, doc = DocumentService.get_by_id(doc_id)
                if not e:
                    return get_data_error_result(retmsg="Document not found!")
                tenant_id = DocumentService.get_tenant_id(doc_id)
                if not tenant_id:
                    return get_data_error_result(retmsg="Tenant not found!")
                # Remove the document's chunks from the tenant's index.
                ELASTICSEARCH.deleteByQuery(
                    Q("match", doc_id=doc.id), idxnm=search.index_name(tenant_id))
                # Roll back kb token/chunk counters by the doc's totals.
                DocumentService.increment_chunk_num(
                    doc.id, doc.kb_id, doc.token_num * -1, doc.chunk_num * -1, 0)
                if not DocumentService.delete(doc):
                    return get_data_error_result(
                        retmsg="Database error (Document removal)!")
            File2DocumentService.delete_by_file_id(file_id)
        return get_json_result(data=True)
    except Exception as e:
        return server_error_response(e)
@manager.route('/rename', methods=['POST'])
@login_required
@validate_request("file_id", "name")
def rename():
    """Rename a file; the file extension must not change.

    Request JSON: {"file_id": ..., "name": ...}.
    Returns: JSON true on success, or an error result.
    """
    req = request.json
    try:
        e, file = FileService.get_by_id(req["file_id"])
        if not e:
            return get_data_error_result(retmsg="File not found!")
        if pathlib.Path(req["name"].lower()).suffix != pathlib.Path(
                file.name.lower()).suffix:
            return get_json_result(
                data=False,
                retmsg="The extension of file can't be changed",
                retcode=RetCode.ARGUMENT_ERROR)
        # Bug fix: the File model's field is `parent_id` (as used by the
        # create endpoint's duplicate check); the original filtered on a
        # nonexistent `pf_id` kwarg, so this check could never work.
        if FileService.query(name=req["name"], parent_id=file.parent_id):
            return get_data_error_result(
                retmsg="Duplicated file name in the same folder.")
        if not FileService.update_by_id(
                req["file_id"], {"name": req["name"]}):
            return get_data_error_result(
                retmsg="Database error (File rename)!")
        return get_json_result(data=True)
    except Exception as e:
        return server_error_response(e)
@manager.route('/get/<file_id>', methods=['GET'])
# @login_required
def get(file_id):
    """Stream a stored file's bytes with a best-effort Content-Type header."""
    try:
        found, doc = FileService.get_by_id(file_id)
        if not found:
            return get_data_error_result(retmsg="Document not found!")
        response = flask.make_response(MINIO.get(doc.parent_id, doc.location))
        ext = re.search(r"\.([^.]+)$", doc.name)
        if ext:
            # Visual files are served as images; everything else falls back
            # to an application/* type derived from the extension.
            category = 'image' if doc.type == FileType.VISUAL.value else 'application'
            response.headers.set('Content-Type', '%s/%s' % (category, ext.group(1)))
        return response
    except Exception as e:
        return server_error_response(e)
| from api.settings import RetCode, GITHUB_OAUTH, CHAT_MDL, EMBEDDING_MDL, ASR_MDL, IMAGE2TEXT_MDL, PARSERS, API_KEY, \ | from api.settings import RetCode, GITHUB_OAUTH, CHAT_MDL, EMBEDDING_MDL, ASR_MDL, IMAGE2TEXT_MDL, PARSERS, API_KEY, \ | ||||
| LLM_FACTORY, LLM_BASE_URL | LLM_FACTORY, LLM_BASE_URL | ||||
| from api.db.services.user_service import UserService, TenantService, UserTenantService | from api.db.services.user_service import UserService, TenantService, UserTenantService | ||||
| from api.db.services.file_service import FileService | |||||
| from api.settings import stat_logger | from api.settings import stat_logger | ||||
| from api.utils.api_utils import get_json_result, cors_reponse | from api.utils.api_utils import get_json_result, cors_reponse | ||||
| "invited_by": user_id, | "invited_by": user_id, | ||||
| "role": UserTenantRole.OWNER | "role": UserTenantRole.OWNER | ||||
| } | } | ||||
| file_id = get_uuid() | |||||
| file = { | |||||
| "id": file_id, | |||||
| "parent_id": file_id, | |||||
| "tenant_id": user_id, | |||||
| "created_by": user_id, | |||||
| "name": "/", | |||||
| "type": FileType.FOLDER, | |||||
| "size": 0, | |||||
| "location": "", | |||||
| } | |||||
| tenant_llm = [] | tenant_llm = [] | ||||
| for llm in LLMService.query(fid=LLM_FACTORY): | for llm in LLMService.query(fid=LLM_FACTORY): | ||||
| tenant_llm.append({"tenant_id": user_id, | tenant_llm.append({"tenant_id": user_id, | ||||
| TenantService.insert(**tenant) | TenantService.insert(**tenant) | ||||
| UserTenantService.insert(**usr_tenant) | UserTenantService.insert(**usr_tenant) | ||||
| TenantLLMService.insert_many(tenant_llm) | TenantLLMService.insert_many(tenant_llm) | ||||
| FileService.insert(file) | |||||
| return UserService.query(email=user["email"]) | return UserService.query(email=user["email"]) | ||||
| VISUAL = 'visual' | VISUAL = 'visual' | ||||
| AURAL = 'aural' | AURAL = 'aural' | ||||
| VIRTUAL = 'virtual' | VIRTUAL = 'virtual' | ||||
| FOLDER = 'folder' | |||||
| OTHER = "other" | |||||
| class LLMType(StrEnum): | class LLMType(StrEnum): |
| db_table = "document" | db_table = "document" | ||||
class File(DataBaseModel):
    """A node (file or folder) in the per-tenant virtual file tree.

    The root folder points at itself (its parent_id equals its own id, as
    set up during user registration).
    """
    id = CharField(
        max_length=32,
        primary_key=True,
    )
    # Id of the containing folder; equals `id` for the root folder.
    parent_id = CharField(
        max_length=32,
        null=False,
        help_text="parent folder id",
        index=True)
    # Owning tenant (the user's id at registration time).
    tenant_id = CharField(
        max_length=32,
        null=False,
        help_text="tenant id",
        index=True)
    created_by = CharField(
        max_length=32,
        null=False,
        help_text="who created it")
    # Display name; "/" for the root folder.
    name = CharField(
        max_length=255,
        null=False,
        help_text="file name or folder name",
        index=True)
    # Storage key within the parent-folder bucket; empty for folders.
    location = CharField(
        max_length=255,
        null=True,
        help_text="where dose it store")
    # Size of the stored blob in bytes; 0 for folders/virtual entries.
    size = IntegerField(default=0)
    # A FileType value (folder/virtual/visual/...), not a raw extension.
    type = CharField(max_length=32, null=False, help_text="file extension")

    class Meta:
        db_table = "file"
class File2Document(DataBaseModel):
    """Many-to-many mapping between File rows and generated Document rows."""
    id = CharField(
        max_length=32,
        primary_key=True,
    )
    # Source file in the file-management tree.
    file_id = CharField(
        max_length=32,
        null=True,
        help_text="file id",
        index=True)
    # Document created from the file inside a knowledgebase.
    document_id = CharField(
        max_length=32,
        null=True,
        help_text="document id",
        index=True)

    class Meta:
        db_table = "file2document"
| class Task(DataBaseModel): | class Task(DataBaseModel): | ||||
| id = CharField(max_length=32, primary_key=True) | id = CharField(max_length=32, primary_key=True) | ||||
| doc_id = CharField(max_length=32, null=False, index=True) | doc_id = CharField(max_length=32, null=False, index=True) |
| # | # | ||||
| from peewee import Expression | from peewee import Expression | ||||
| from elasticsearch_dsl import Q | |||||
| from rag.utils import ELASTICSEARCH | |||||
| from rag.utils.minio_conn import MINIO | |||||
| from rag.nlp import search | |||||
| from api.db import FileType, TaskStatus | from api.db import FileType, TaskStatus | ||||
| from api.db.db_models import DB, Knowledgebase, Tenant | from api.db.db_models import DB, Knowledgebase, Tenant | ||||
| from api.db.db_models import Document | from api.db.db_models import Document | ||||
| raise RuntimeError("Database error (Knowledgebase)!") | raise RuntimeError("Database error (Knowledgebase)!") | ||||
| return cls.delete_by_id(doc.id) | return cls.delete_by_id(doc.id) | ||||
@classmethod
@DB.connection_context()
def remove_document(cls, doc, tenant_id):
    """Remove a document and all of its derived data.

    Deletes the document's chunks from the tenant's Elasticsearch index,
    rolls back the knowledgebase token/chunk counters, deletes the DB row,
    and removes the stored blob from MinIO.

    Raises:
        RuntimeError: if the database deletion fails.
    """
    # Purge the document's chunks from the tenant's index.
    ELASTICSEARCH.deleteByQuery(
        Q("match", doc_id=doc.id), idxnm=search.index_name(tenant_id))
    # Roll back the kb counters by the document's totals.
    cls.increment_chunk_num(
        doc.id, doc.kb_id, doc.token_num * -1, doc.chunk_num * -1, 0)
    if not cls.delete(doc):
        raise RuntimeError("Database error (Document removal)!")
    MINIO.rm(doc.kb_id, doc.location)
    # NOTE(review): `cls.delete(doc)` already ran above, so this extra
    # delete_by_id looks redundant — confirm CommonService semantics.
    return cls.delete_by_id(doc.id)
| @classmethod | @classmethod | ||||
| @DB.connection_context() | @DB.connection_context() | ||||
| def get_newly_uploaded(cls, tm, mod=0, comm=1, items_per_page=64): | def get_newly_uploaded(cls, tm, mod=0, comm=1, items_per_page=64): |
| # | |||||
| # Copyright 2024 The InfiniFlow Authors. All Rights Reserved. | |||||
| # | |||||
| # Licensed under the Apache License, Version 2.0 (the "License"); | |||||
| # you may not use this file except in compliance with the License. | |||||
| # You may obtain a copy of the License at | |||||
| # | |||||
| # http://www.apache.org/licenses/LICENSE-2.0 | |||||
| # | |||||
| # Unless required by applicable law or agreed to in writing, software | |||||
| # distributed under the License is distributed on an "AS IS" BASIS, | |||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
| # See the License for the specific language governing permissions and | |||||
| # limitations under the License. | |||||
| # | |||||
| from datetime import datetime | |||||
| from api.db.db_models import DB | |||||
| from api.db.db_models import File, Document, File2Document | |||||
| from api.db.services.common_service import CommonService | |||||
| from api.utils import current_timestamp, datetime_format | |||||
class File2DocumentService(CommonService):
    """CRUD helpers for the File ↔ Document mapping table."""
    model = File2Document

    @classmethod
    @DB.connection_context()
    def get_by_file_id(cls, file_id):
        """Return all mapping rows for the given file id."""
        objs = cls.model.select().where(cls.model.file_id == file_id)
        return objs

    @classmethod
    @DB.connection_context()
    def get_by_document_id(cls, document_id):
        """Return all mapping rows for the given document id."""
        objs = cls.model.select().where(cls.model.document_id == document_id)
        return objs

    @classmethod
    @DB.connection_context()
    def insert(cls, obj):
        """Insert a mapping row and return the persisted object.

        Raises:
            RuntimeError: if the save or the re-fetch fails.
        """
        if not cls.save(**obj):
            raise RuntimeError("Database error (File)!")
        e, obj = cls.get_by_id(obj["id"])
        if not e:
            raise RuntimeError("Database error (File retrieval)!")
        return obj

    @classmethod
    @DB.connection_context()
    def delete_by_file_id(cls, file_id):
        """Delete all mappings for a file; return the number of rows removed."""
        return cls.model.delete().where(cls.model.file_id == file_id).execute()

    @classmethod
    @DB.connection_context()
    def delete_by_document_id(cls, doc_id):
        """Delete all mappings for a document; return the number removed."""
        return cls.model.delete().where(cls.model.document_id == doc_id).execute()

    @classmethod
    @DB.connection_context()
    def update_by_file_id(cls, file_id, obj):
        """Update the row whose id equals *file_id* and return the fresh row.

        Also stamps update_time/update_date on the given field dict.
        """
        obj["update_time"] = current_timestamp()
        obj["update_date"] = datetime_format(datetime.now())
        cls.model.update(obj).where(cls.model.id == file_id).execute()
        # Bug fix: the original re-fetched with `cls.get_by_id(cls.model.id)`,
        # passing the peewee field object instead of the row's id, so it
        # never returned the updated row.
        e, obj = cls.get_by_id(file_id)
        return obj
| # | |||||
| # Copyright 2024 The InfiniFlow Authors. All Rights Reserved. | |||||
| # | |||||
| # Licensed under the Apache License, Version 2.0 (the "License"); | |||||
| # you may not use this file except in compliance with the License. | |||||
| # You may obtain a copy of the License at | |||||
| # | |||||
| # http://www.apache.org/licenses/LICENSE-2.0 | |||||
| # | |||||
| # Unless required by applicable law or agreed to in writing, software | |||||
| # distributed under the License is distributed on an "AS IS" BASIS, | |||||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
| # See the License for the specific language governing permissions and | |||||
| # limitations under the License. | |||||
| # | |||||
| from flask_login import current_user | |||||
| from peewee import fn | |||||
| from api.db import FileType | |||||
| from api.db.db_models import DB, File2Document, Knowledgebase | |||||
| from api.db.db_models import File, Document | |||||
| from api.db.services.common_service import CommonService | |||||
| from api.utils import get_uuid | |||||
| from rag.utils import MINIO | |||||
class FileService(CommonService):
    """Service layer for the File table: folder hierarchy navigation, listing,
    creation and recursive deletion of user files/folders."""
    model = File

    @classmethod
    @DB.connection_context()
    def get_by_pf_id(cls, tenant_id, pf_id, page_number, items_per_page,
                     orderby, desc, keywords):
        """List the children of folder *pf_id* for *tenant_id*, paginated.

        Args:
            orderby: model field name used for sorting.
            desc: truthy for descending order.
            keywords: optional case-insensitive substring filter on the name.

        Returns:
            (list_of_row_dicts, total_count) — folder rows get a computed
            "size" and an empty "kbs_info"; file rows get the knowledge-base
            links they belong to.
        """
        if keywords:
            # Multiple where() arguments are AND-ed together by peewee.
            files = cls.model.select().where(
                (cls.model.tenant_id == tenant_id)
                & (cls.model.parent_id == pf_id), (fn.LOWER(cls.model.name).like(f"%%{keywords.lower()}%%")))
        else:
            files = cls.model.select().where((cls.model.tenant_id == tenant_id)
                                             & (cls.model.parent_id == pf_id))
        count = files.count()
        if desc:
            files = files.order_by(cls.model.getter_by(orderby).desc())
        else:
            files = files.order_by(cls.model.getter_by(orderby).asc())
        files = files.paginate(page_number, items_per_page)
        res_files = list(files.dicts())
        for file in res_files:
            if file["type"] == FileType.FOLDER.value:
                file["size"] = cls.get_folder_size(file["id"])
                file['kbs_info'] = []
                continue
            kbs_info = cls.get_kb_id_by_file_id(file['id'])
            file['kbs_info'] = kbs_info
        return res_files, count

    @classmethod
    @DB.connection_context()
    def get_kb_id_by_file_id(cls, file_id):
        """Return [{"kb_id", "kb_name"}, ...] for the knowledge bases that
        contain the document(s) linked to *file_id* (empty list if none)."""
        kbs = (cls.model.select(*[Knowledgebase.id, Knowledgebase.name])
               .join(File2Document, on=(File2Document.file_id == file_id))
               .join(Document, on=(File2Document.document_id == Document.id))
               .join(Knowledgebase, on=(Knowledgebase.id == Document.kb_id))
               .where(cls.model.id == file_id))
        if not kbs:
            return []
        kbs_info_list = []
        for kb in list(kbs.dicts()):
            kbs_info_list.append({"kb_id": kb['id'], "kb_name": kb['name']})
        return kbs_info_list

    @classmethod
    @DB.connection_context()
    def get_by_pf_id_name(cls, id, name):
        """Return the child of folder *id* named *name*, or None if absent.

        Raises:
            RuntimeError: if the row exists but cannot be re-read by id.
        """
        file = cls.model.select().where((cls.model.parent_id == id) & (cls.model.name == name))
        if file.count():
            e, file = cls.get_by_id(file[0].id)
            if not e:
                raise RuntimeError("Database error (File retrieval)!")
            return file
        return None

    @classmethod
    @DB.connection_context()
    def get_id_list_by_id(cls, id, name, count, res):
        """Walk the path components name[count:] downward from folder *id*,
        appending each matched folder id to *res*; stops at the first missing
        component.  Returns *res*."""
        if count < len(name):
            file = cls.get_by_pf_id_name(id, name[count])
            if file:
                res.append(file.id)
                return cls.get_id_list_by_id(file.id, name, count + 1, res)
            else:
                return res
        else:
            return res

    @classmethod
    @DB.connection_context()
    def get_all_innermost_file_ids(cls, folder_id, result_ids):
        """Depth-first collect the ids of all leaf nodes (rows with no
        children) under *folder_id* into *result_ids*; returns *result_ids*."""
        subfolders = cls.model.select().where(cls.model.parent_id == folder_id)
        if subfolders.exists():
            for subfolder in subfolders:
                cls.get_all_innermost_file_ids(subfolder.id, result_ids)
        else:
            result_ids.append(folder_id)
        return result_ids

    @classmethod
    @DB.connection_context()
    def create_folder(cls, file, parent_id, name, count):
        """Recursively create the folder chain name[count:-1] under *parent_id*
        for the current user; returns the deepest folder created (or *file*
        when there is nothing left to create)."""
        if count > len(name) - 2:
            return file
        else:
            file = cls.insert({
                "id": get_uuid(),
                "parent_id": parent_id,
                "tenant_id": current_user.id,
                "created_by": current_user.id,
                "name": name[count],
                "location": "",
                "size": 0,
                # NOTE(review): other call sites compare against
                # FileType.FOLDER.value — confirm peewee stores the enum member
                # identically before normalizing.
                "type": FileType.FOLDER
            })
            return cls.create_folder(file, file.id, name, count + 1)

    @classmethod
    @DB.connection_context()
    def is_parent_folder_exist(cls, parent_id):
        """Return True if *parent_id* exists; otherwise purge the dangling
        subtree for the current user and return False."""
        parent_files = cls.model.select().where(cls.model.id == parent_id)
        if parent_files.count():
            return True
        # BUG FIX: delete_folder_by_pf_id takes (user_id, folder_id); the
        # original single-argument call raised TypeError.  Use the current
        # user, consistent with create_folder above.
        cls.delete_folder_by_pf_id(current_user.id, parent_id)
        return False

    @classmethod
    @DB.connection_context()
    def get_root_folder(cls, tenant_id):
        """Return the tenant's root folder (the row whose parent_id == id).

        Raises:
            RuntimeError: if the row cannot be re-read by id.
        """
        # BUG FIX: the original combined the two conditions with Python
        # `and`, which evaluates to only the second peewee expression and
        # silently drops the tenant filter; use `&`.
        file = cls.model.select().where((cls.model.tenant_id == tenant_id)
                                        & (cls.model.parent_id == cls.model.id))
        e, file = cls.get_by_id(file[0].id)
        if not e:
            raise RuntimeError("Database error (File retrieval)!")
        return file

    @classmethod
    @DB.connection_context()
    def get_parent_folder(cls, file_id):
        """Return the parent folder row of *file_id*.

        Raises:
            RuntimeError: if *file_id* does not exist or its parent cannot be read.
        """
        file = cls.model.select().where(cls.model.id == file_id)
        if file.count():
            e, file = cls.get_by_id(file[0].parent_id)
            if not e:
                raise RuntimeError("Database error (File retrieval)!")
        else:
            raise RuntimeError("Database error (File doesn't exist)!")
        return file

    @classmethod
    @DB.connection_context()
    def get_all_parent_folders(cls, start_id):
        """Return the chain of ancestor rows from *start_id* up to (and
        including) the root, where the root is the row whose parent_id == id."""
        parent_folders = []
        current_id = start_id
        while current_id:
            e, file = cls.get_by_id(current_id)
            # BUG FIX: check the success flag before touching file attributes;
            # the original read file.parent_id first and crashed when the
            # lookup failed.
            if e and file.parent_id != file.id:
                parent_folders.append(file)
                current_id = file.parent_id
            else:
                parent_folders.append(file)
                break
        return parent_folders

    @classmethod
    @DB.connection_context()
    def insert(cls, file):
        """Persist *file* (a dict that must contain an "id" key) and return the
        stored row.

        Raises:
            RuntimeError: if the save fails or the follow-up retrieval finds nothing.
        """
        if not cls.save(**file):
            raise RuntimeError("Database error (File)!")
        e, file = cls.get_by_id(file["id"])
        if not e:
            raise RuntimeError("Database error (File retrieval)!")
        return file

    @classmethod
    @DB.connection_context()
    def delete(cls, file):
        """Delete the single row *file* by id; returns the delete_by_id result."""
        return cls.delete_by_id(file.id)

    @classmethod
    @DB.connection_context()
    def delete_by_pf_id(cls, folder_id):
        """Delete all direct children of *folder_id*; return rows removed."""
        return cls.model.delete().where(cls.model.parent_id == folder_id).execute()

    @classmethod
    @DB.connection_context()
    def delete_folder_by_pf_id(cls, user_id, folder_id):
        """Recursively delete folder *folder_id* and its whole subtree for
        *user_id*; return the number of rows removed for the folder itself.

        Raises:
            RuntimeError: on any database error (original exception is printed).
        """
        try:
            files = cls.model.select().where((cls.model.tenant_id == user_id)
                                             & (cls.model.parent_id == folder_id))
            for file in files:
                cls.delete_folder_by_pf_id(user_id, file.id)
            # BUG FIX: the original had a trailing comma after execute(),
            # making the method return a 1-tuple instead of the row count.
            return cls.model.delete().where((cls.model.tenant_id == user_id)
                                            & (cls.model.id == folder_id)).execute()
        except Exception as e:
            print(e)
            raise RuntimeError("Database error (File retrieval)!")

    @classmethod
    @DB.connection_context()
    def get_file_count(cls, tenant_id):
        """Return the number of File rows owned by *tenant_id*."""
        # COUNT(*) in SQL instead of materializing every row just for len().
        return cls.model.select(cls.model.id).where(
            cls.model.tenant_id == tenant_id).count()

    @classmethod
    @DB.connection_context()
    def get_folder_size(cls, folder_id):
        """Return the total size of every row in the subtree under *folder_id*
        (depth-first sum of the `size` column, recursing into sub-folders)."""
        size = 0

        def dfs(parent_id):
            nonlocal size
            for f in cls.model.select(*[cls.model.id, cls.model.size, cls.model.type]).where(cls.model.parent_id == parent_id):
                size += f.size
                if f.type == FileType.FOLDER.value:
                    dfs(f.id)

        dfs(folder_id)
        return size
| return FileType.AURAL.value | return FileType.AURAL.value | ||||
| if re.match(r".*\.(jpg|jpeg|png|tif|gif|pcx|tga|exif|fpx|svg|psd|cdr|pcd|dxf|ufo|eps|ai|raw|WMF|webp|avif|apng|icon|ico|mpg|mpeg|avi|rm|rmvb|mov|wmv|asf|dat|asx|wvx|mpe|mpa|mp4)$", filename): | if re.match(r".*\.(jpg|jpeg|png|tif|gif|pcx|tga|exif|fpx|svg|psd|cdr|pcd|dxf|ufo|eps|ai|raw|WMF|webp|avif|apng|icon|ico|mpg|mpeg|avi|rm|rmvb|mov|wmv|asf|dat|asx|wvx|mpe|mpa|mp4)$", filename): | ||||
| return FileType.VISUAL | |||||
| return FileType.VISUAL.value | |||||
| return FileType.OTHER.value | |||||
| def thumbnail(filename, blob): | def thumbnail(filename, blob): |