### What problem does this PR solve?

- Consolidate HTTP API test fixtures using batch operations (batch_add_chunks, batch_create_chat_assistants)
- Fix fixture initialization order in clear_session_with_chat_assistants
- Add new SDK API test suite for session management (create/delete/list/update)

### Type of change

- [x] Add test cases
- [x] Refactoring
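The HTTP-side batch helpers referenced throughout the updated fixtures are not shown in this diff. As a rough sketch of their likely shape, composed from the single-item helpers (`add_chunk`, `create_chat_assistant`, `create_session_with_chat_assistant`) visible in the removed loops below — signatures follow the new call sites, and anything beyond that is an assumption:

```python
# Hypothetical sketch of the HTTP-side batch helpers (not part of this diff).
# Signatures mirror the call sites in the updated fixtures; the single-item
# helpers and the response shapes come from the removed loops.
def batch_add_chunks(api_key, dataset_id, document_id, num):
    # Each add_chunk response carries the new chunk id at data.chunk.id.
    return [
        add_chunk(api_key, dataset_id, document_id, {"content": f"chunk test {i}"})["data"]["chunk"]["id"]
        for i in range(num)
    ]


def batch_create_chat_assistants(api_key, num):
    # The new call sites create assistants without dataset bindings.
    return [
        create_chat_assistant(api_key, {"name": f"test_chat_assistant_{i}"})["data"]["id"]
        for i in range(num)
    ]


def batch_add_sessions_with_chat_assistant(api_key, chat_assistant_id, num):
    return [
        create_session_with_chat_assistant(api_key, chat_assistant_id, {"name": f"session_with_chat_assistant_{i}"})["data"]["id"]
        for i in range(num)
    ]
```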
 # limitations under the License.
 #
+from time import sleep
+
 import pytest
 from common import (
-    add_chunk,
+    batch_add_chunks,
+    batch_create_chat_assistants,
     batch_create_datasets,
     bulk_upload_documents,
-    create_chat_assistant,
     delete_chat_assistants,
     delete_datasets,
     delete_session_with_chat_assistants,
 @pytest.fixture(scope="function")
 def clear_session_with_chat_assistants(request, api_key, add_chat_assistants):
-    _, _, chat_assistant_ids = add_chat_assistants
     def cleanup():
         for chat_assistant_id in chat_assistant_ids:
             delete_session_with_chat_assistants(api_key, chat_assistant_id)
     request.addfinalizer(cleanup)
+    _, _, chat_assistant_ids = add_chat_assistants
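For context on the initialization-order change: registering the finalizer before any further setup guarantees teardown is already scheduled if later setup fails partway. A minimal pytest sketch of the pattern (illustrative only, not taken from this PR):

```python
import pytest


@pytest.fixture
def tracked_resources(request):
    created = []

    def cleanup():
        # Already registered, so this runs even if the setup below raises.
        for item in created:
            print(f"releasing {item}")

    request.addfinalizer(cleanup)

    created.append("session-1")  # hypothetical resource acquisition
    return created
```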
 @pytest.fixture(scope="class")
 def add_dataset(request, api_key):

     dataset_id, document_id = add_document
     parse_documents(api_key, dataset_id, {"document_ids": [document_id]})
     condition(api_key, dataset_id)
-    chunk_ids = []
-    for i in range(4):
-        res = add_chunk(api_key, dataset_id, document_id, {"content": f"chunk test {i}"})
-        chunk_ids.append(res["data"]["chunk"]["id"])
-    # issues/6487
-    from time import sleep
-    sleep(1)
+    chunk_ids = batch_add_chunks(api_key, dataset_id, document_id, 4)
+    sleep(1)  # issues/6487
     return dataset_id, document_id, chunk_ids
     dataset_id, document_id = add_document
     parse_documents(api_key, dataset_id, {"document_ids": [document_id]})
     condition(api_key, dataset_id)
-    chat_assistant_ids = []
-    for i in range(5):
-        res = create_chat_assistant(api_key, {"name": f"test_chat_assistant_{i}", "dataset_ids": [dataset_id]})
-        chat_assistant_ids.append(res["data"]["id"])
-    return dataset_id, document_id, chat_assistant_ids
+    return dataset_id, document_id, batch_create_chat_assistants(api_key, 5)
 # limitations under the License.
 #
 import pytest
-from common import create_chat_assistant, delete_chat_assistants, list_documents, parse_documents
+from common import batch_create_chat_assistants, delete_chat_assistants, list_documents, parse_documents
 from utils import wait_for

     dataset_id, document_id = add_document
     parse_documents(api_key, dataset_id, {"document_ids": [document_id]})
     condition(api_key, dataset_id)
-    chat_assistant_ids = []
-    for i in range(5):
-        res = create_chat_assistant(api_key, {"name": f"test_chat_assistant_{i}", "dataset_ids": [dataset_id]})
-        chat_assistant_ids.append(res["data"]["id"])
-    return dataset_id, document_id, chat_assistant_ids
+    return dataset_id, document_id, batch_create_chat_assistants(api_key, 5)
 #
+from time import sleep
+
 import pytest
-from common import add_chunk, delete_chunks, list_documents, parse_documents
+from common import batch_add_chunks, delete_chunks, list_documents, parse_documents
 from utils import wait_for
 @pytest.fixture(scope="function")
 def add_chunks_func(request, api_key, add_document):
+    def cleanup():
+        delete_chunks(api_key, dataset_id, document_id, {"chunk_ids": []})
+    request.addfinalizer(cleanup)
     dataset_id, document_id = add_document
     parse_documents(api_key, dataset_id, {"document_ids": [document_id]})
     condition(api_key, dataset_id)
-    chunk_ids = []
-    for i in range(4):
-        res = add_chunk(api_key, dataset_id, document_id, {"content": f"chunk test {i}"})
-        chunk_ids.append(res["data"]["chunk"]["id"])
+    chunk_ids = batch_add_chunks(api_key, dataset_id, document_id, 4)
     # issues/6487
-    from time import sleep
     sleep(1)
-    def cleanup():
-        delete_chunks(api_key, dataset_id, document_id, {"chunk_ids": chunk_ids})
-    request.addfinalizer(cleanup)
     return dataset_id, document_id, chunk_ids
 @pytest.fixture(scope="function")
 def add_document_func(request, api_key, add_dataset, ragflow_tmp_dir):
-    dataset_id = add_dataset
-    document_ids = bulk_upload_documents(api_key, dataset_id, 1, ragflow_tmp_dir)
     def cleanup():
         delete_documents(api_key, dataset_id, {"ids": None})
     request.addfinalizer(cleanup)
-    return dataset_id, document_ids[0]
+    dataset_id = add_dataset
+    return dataset_id, bulk_upload_documents(api_key, dataset_id, 1, ragflow_tmp_dir)[0]

 @pytest.fixture(scope="class")
 def add_documents(request, api_key, add_dataset, ragflow_tmp_dir):
-    dataset_id = add_dataset
-    document_ids = bulk_upload_documents(api_key, dataset_id, 5, ragflow_tmp_dir)
     def cleanup():
         delete_documents(api_key, dataset_id, {"ids": None})
     request.addfinalizer(cleanup)
-    return dataset_id, document_ids
+    dataset_id = add_dataset
+    return dataset_id, bulk_upload_documents(api_key, dataset_id, 5, ragflow_tmp_dir)

 @pytest.fixture(scope="function")
 def add_documents_func(request, api_key, add_dataset_func, ragflow_tmp_dir):
-    dataset_id = add_dataset_func
-    document_ids = bulk_upload_documents(api_key, dataset_id, 3, ragflow_tmp_dir)
     def cleanup():
         delete_documents(api_key, dataset_id, {"ids": None})
     request.addfinalizer(cleanup)
-    return dataset_id, document_ids
+    dataset_id = add_dataset_func
+    return dataset_id, bulk_upload_documents(api_key, dataset_id, 3, ragflow_tmp_dir)
 # limitations under the License.
 #
 import pytest
-from common import create_session_with_chat_assistant, delete_session_with_chat_assistants
+from common import batch_add_sessions_with_chat_assistant, delete_session_with_chat_assistants

 @pytest.fixture(scope="class")
 def add_sessions_with_chat_assistant(request, api_key, add_chat_assistants):
-    _, _, chat_assistant_ids = add_chat_assistants
     def cleanup():
         for chat_assistant_id in chat_assistant_ids:
             delete_session_with_chat_assistants(api_key, chat_assistant_id)
     request.addfinalizer(cleanup)
-    session_ids = []
-    for i in range(5):
-        res = create_session_with_chat_assistant(api_key, chat_assistant_ids[0], {"name": f"session_with_chat_assistant_{i}"})
-        session_ids.append(res["data"]["id"])
-    return chat_assistant_ids[0], session_ids
+    _, _, chat_assistant_ids = add_chat_assistants
+    return chat_assistant_ids[0], batch_add_sessions_with_chat_assistant(api_key, chat_assistant_ids[0], 5)

 @pytest.fixture(scope="function")
 def add_sessions_with_chat_assistant_func(request, api_key, add_chat_assistants):
-    _, _, chat_assistant_ids = add_chat_assistants
     def cleanup():
         for chat_assistant_id in chat_assistant_ids:
             delete_session_with_chat_assistants(api_key, chat_assistant_id)
     request.addfinalizer(cleanup)
-    session_ids = []
-    for i in range(5):
-        res = create_session_with_chat_assistant(api_key, chat_assistant_ids[0], {"name": f"session_with_chat_assistant_{i}"})
-        session_ids.append(res["data"]["id"])
-    return chat_assistant_ids[0], session_ids
+    _, _, chat_assistant_ids = add_chat_assistants
+    return chat_assistant_ids[0], batch_add_sessions_with_chat_assistant(api_key, chat_assistant_ids[0], 5)
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-from concurrent.futures import ThreadPoolExecutor
+from concurrent.futures import ThreadPoolExecutor, as_completed

 import pytest
 from common import INVALID_API_TOKEN, SESSION_WITH_CHAT_NAME_LIMIT, create_session_with_chat_assistant, delete_chat_assistants, list_session_with_chat_assistants

     @pytest.mark.p3
     def test_concurrent_create_session(self, api_key, add_chat_assistants):
-        chunk_num = 1000
+        count = 1000
         _, _, chat_assistant_ids = add_chat_assistants
         res = list_session_with_chat_assistants(api_key, chat_assistant_ids[0])
         if res["code"] != 0:
             assert False, res
-        chunks_count = len(res["data"])
+        sessions_count = len(res["data"])
         with ThreadPoolExecutor(max_workers=5) as executor:
             futures = [
                     chat_assistant_ids[0],
                     {"name": f"session with chat assistant test {i}"},
                 )
-                for i in range(chunk_num)
+                for i in range(count)
             ]
-        responses = [f.result() for f in futures]
-        assert all(r["code"] == 0 for r in responses)
-        res = list_session_with_chat_assistants(api_key, chat_assistant_ids[0], {"page_size": chunk_num})
+        responses = list(as_completed(futures))
+        assert len(responses) == count, responses
+        assert all(future.result()["code"] == 0 for future in futures)
+        res = list_session_with_chat_assistants(api_key, chat_assistant_ids[0], {"page_size": count * 2})
         if res["code"] != 0:
             assert False, res
-        assert len(res["data"]) == chunks_count + chunk_num
+        assert len(res["data"]) == sessions_count + count

     @pytest.mark.p3
     def test_add_session_to_deleted_chat_assistant(self, api_key, add_chat_assistants):
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-from concurrent.futures import ThreadPoolExecutor
+from concurrent.futures import ThreadPoolExecutor, as_completed

 import pytest
 from common import INVALID_API_TOKEN, batch_add_sessions_with_chat_assistant, delete_session_with_chat_assistants, list_session_with_chat_assistants

     @pytest.mark.p3
     def test_concurrent_deletion(self, api_key, add_chat_assistants):
-        sessions_num = 100
+        count = 100
         _, _, chat_assistant_ids = add_chat_assistants
-        session_ids = batch_add_sessions_with_chat_assistant(api_key, chat_assistant_ids[0], sessions_num)
+        session_ids = batch_add_sessions_with_chat_assistant(api_key, chat_assistant_ids[0], count)
         with ThreadPoolExecutor(max_workers=5) as executor:
             futures = [
                     chat_assistant_ids[0],
                     {"ids": session_ids[i : i + 1]},
                 )
-                for i in range(sessions_num)
+                for i in range(count)
             ]
-        responses = [f.result() for f in futures]
-        assert all(r["code"] == 0 for r in responses)
+        responses = list(as_completed(futures))
+        assert len(responses) == count, responses
+        assert all(future.result()["code"] == 0 for future in futures)

     @pytest.mark.p3
     def test_delete_1k(self, api_key, add_chat_assistants):
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-from concurrent.futures import ThreadPoolExecutor
+from concurrent.futures import ThreadPoolExecutor, as_completed

 import pytest
 from common import INVALID_API_TOKEN, delete_chat_assistants, list_session_with_chat_assistants

     @pytest.mark.p3
     def test_concurrent_list(self, api_key, add_sessions_with_chat_assistant):
+        count = 100
         chat_assistant_id, _ = add_sessions_with_chat_assistant
         with ThreadPoolExecutor(max_workers=5) as executor:
-            futures = [executor.submit(list_session_with_chat_assistants, api_key, chat_assistant_id) for i in range(100)]
-        responses = [f.result() for f in futures]
-        assert all(r["code"] == 0 for r in responses)
+            futures = [executor.submit(list_session_with_chat_assistants, api_key, chat_assistant_id) for i in range(count)]
+        responses = list(as_completed(futures))
+        assert len(responses) == count, responses
+        assert all(future.result()["code"] == 0 for future in futures)

     @pytest.mark.p3
     def test_invalid_params(self, api_key, add_sessions_with_chat_assistant):
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-from concurrent.futures import ThreadPoolExecutor
+from concurrent.futures import ThreadPoolExecutor, as_completed
 from random import randint

 import pytest

     @pytest.mark.p3
     def test_concurrent_update_session(self, api_key, add_sessions_with_chat_assistant_func):
-        chunk_num = 50
+        count = 50
         chat_assistant_id, session_ids = add_sessions_with_chat_assistant_func
         with ThreadPoolExecutor(max_workers=5) as executor:
                     session_ids[randint(0, 4)],
                     {"name": f"update session test {i}"},
                 )
-                for i in range(chunk_num)
+                for i in range(count)
             ]
-        responses = [f.result() for f in futures]
-        assert all(r["code"] == 0 for r in responses)
+        responses = list(as_completed(futures))
+        assert len(responses) == count, responses
+        assert all(future.result()["code"] == 0 for future in futures)

     @pytest.mark.p3
     def test_update_session_to_deleted_chat_assistant(self, api_key, add_sessions_with_chat_assistant_func):
 from pathlib import Path

-from ragflow_sdk import DataSet, Document, RAGFlow
+from ragflow_sdk import Chat, Chunk, DataSet, Document, RAGFlow, Session
 from utils.file_utils import create_txt_file

 # CHUNK MANAGEMENT WITHIN DATASET
-def batch_add_chunks(document: Document, num: int):
+def batch_add_chunks(document: Document, num: int) -> list[Chunk]:
     return [document.add_chunk(content=f"chunk test {i}") for i in range(num)]

 # CHAT ASSISTANT MANAGEMENT
-def batch_create_chat_assistants(client: RAGFlow, num: int):
+def batch_create_chat_assistants(client: RAGFlow, num: int) -> list[Chat]:
     return [client.create_chat(name=f"test_chat_assistant_{i}") for i in range(num)]

+# SESSION MANAGEMENT
+def batch_add_sessions_with_chat_assistant(chat_assistant: Chat, num) -> list[Session]:
+    return [chat_assistant.create_session(name=f"session_with_chat_assistant_{i}") for i in range(num)]
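A quick usage sketch of these SDK helpers (illustrative only; the base URL and API key below are placeholders, and in the test suite the preconfigured `client` fixture is used instead):

```python
from ragflow_sdk import RAGFlow

from common import batch_add_sessions_with_chat_assistant, batch_create_chat_assistants

# Placeholder credentials; the tests build their client from configs.HOST_ADDRESS.
client = RAGFlow(api_key="<RAGFLOW_API_KEY>", base_url="http://127.0.0.1:9380")

chat_assistants = batch_create_chat_assistants(client, 5)  # five Chat objects
sessions = batch_add_sessions_with_chat_assistant(chat_assistants[0], 5)  # five Session objects on the first assistant
```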
 #
 from pathlib import Path
+from time import sleep

 import pytest
 from common import (
+    batch_add_chunks,
+    batch_create_chat_assistants,
     batch_create_datasets,
     bulk_upload_documents,
 )
 from configs import HOST_ADDRESS, VERSION
 from pytest import FixtureRequest
-from ragflow_sdk import Chunk, DataSet, Document, RAGFlow
+from ragflow_sdk import Chat, Chunk, DataSet, Document, RAGFlow
 from utils import wait_for
 from utils.file_utils import (
     create_docx_file,

     request.addfinalizer(cleanup)
+@pytest.fixture(scope="function")
+def clear_session_with_chat_assistants(request, add_chat_assistants):
+    def cleanup():
+        for chat_assistant in chat_assistants:
+            try:
+                chat_assistant.delete_sessions(ids=None)
+            except Exception:
+                pass
+    request.addfinalizer(cleanup)
+    _, _, chat_assistants = add_chat_assistants
 @pytest.fixture(scope="class")
 def add_dataset(request: FixtureRequest, client: RAGFlow):
     def cleanup():

 @pytest.fixture(scope="class")
 def add_chunks(request: FixtureRequest, add_document: tuple[DataSet, Document]) -> tuple[DataSet, Document, list[Chunk]]:
-    dataset, document = add_document
-    dataset.async_parse_documents([document.id])
-    condition(dataset)
-    chunks = []
-    for i in range(4):
-        chunk = document.add_chunk(content=f"chunk test {i}")
-        chunks.append(chunk)
-    # issues/6487
-    from time import sleep
-    sleep(1)
     def cleanup():
         try:
             document.delete_chunks(ids=[])
         except Exception:
             pass
     request.addfinalizer(cleanup)
+    dataset, document = add_document
+    dataset.async_parse_documents([document.id])
+    condition(dataset)
+    chunks = batch_add_chunks(document, 4)
+    # issues/6487
+    sleep(1)
     return dataset, document, chunks
 @pytest.fixture(scope="class")
-def add_chat_assistants(request, client, add_document):
+def add_chat_assistants(request, client, add_document) -> tuple[DataSet, Document, list[Chat]]:
     def cleanup():
-        client.delete_chats(ids=None)
+        try:
+            client.delete_chats(ids=None)
+        except Exception:
+            pass
     request.addfinalizer(cleanup)
     dataset, document = add_document
     dataset.async_parse_documents([document.id])
     condition(dataset)
-    chat_assistants = []
-    for i in range(5):
-        chat_assistant = client.create_chat(name=f"test_chat_assistant_{i}", dataset_ids=[dataset.id])
-        chat_assistants.append(chat_assistant)
-    return dataset, document, chat_assistants
+    return dataset, document, batch_create_chat_assistants(client, 5)
 # limitations under the License.
 #
 import pytest
+from common import batch_create_chat_assistants
 from pytest import FixtureRequest
 from ragflow_sdk import Chat, DataSet, Document, RAGFlow
 from utils import wait_for

     dataset, document = add_document
     dataset.async_parse_documents([document.id])
     condition(dataset)
-    chat_assistants = []
-    for i in range(5):
-        chat_assistant = client.create_chat(name=f"test_chat_assistant_{i}", dataset_ids=[dataset.id])
-        chat_assistants.append(chat_assistant)
-    return dataset, document, chat_assistants
+    return dataset, document, batch_create_chat_assistants(client, 5)
 #
+from time import sleep
+
 import pytest
+from common import batch_add_chunks
 from pytest import FixtureRequest
 from ragflow_sdk import Chunk, DataSet, Document
 from utils import wait_for

 @pytest.fixture(scope="function")
 def add_chunks_func(request: FixtureRequest, add_document: tuple[DataSet, Document]) -> tuple[DataSet, Document, list[Chunk]]:
+    def cleanup():
+        try:
+            document.delete_chunks(ids=[])
+        except Exception:
+            pass
+    request.addfinalizer(cleanup)
     dataset, document = add_document
     dataset.async_parse_documents([document.id])
     condition(dataset)
-    chunks = [document.add_chunk(content=f"chunk test {i}") for i in range(4)]
+    chunks = batch_add_chunks(document, 4)
     # issues/6487
-    from time import sleep
     sleep(1)
-    def cleanup():
-        document.delete_chunks(ids=[])
-    request.addfinalizer(cleanup)
     return dataset, document, chunks
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from common import batch_add_sessions_with_chat_assistant
from pytest import FixtureRequest
from ragflow_sdk import Chat, DataSet, Document, Session


@pytest.fixture(scope="class")
def add_sessions_with_chat_assistant(request: FixtureRequest, add_chat_assistants: tuple[DataSet, Document, list[Chat]]) -> tuple[Chat, list[Session]]:
    def cleanup():
        for chat_assistant in chat_assistants:
            try:
                chat_assistant.delete_sessions(ids=None)
            except Exception:
                pass

    request.addfinalizer(cleanup)

    _, _, chat_assistants = add_chat_assistants
    return chat_assistants[0], batch_add_sessions_with_chat_assistant(chat_assistants[0], 5)


@pytest.fixture(scope="function")
def add_sessions_with_chat_assistant_func(request: FixtureRequest, add_chat_assistants: tuple[DataSet, Document, list[Chat]]) -> tuple[Chat, list[Session]]:
    def cleanup():
        for chat_assistant in chat_assistants:
            try:
                chat_assistant.delete_sessions(ids=None)
            except Exception:
                pass

    request.addfinalizer(cleanup)

    _, _, chat_assistants = add_chat_assistants
    return chat_assistants[0], batch_add_sessions_with_chat_assistant(chat_assistants[0], 5)
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from concurrent.futures import ThreadPoolExecutor, as_completed

import pytest
from configs import SESSION_WITH_CHAT_NAME_LIMIT


@pytest.mark.usefixtures("clear_session_with_chat_assistants")
class TestSessionWithChatAssistantCreate:
    @pytest.mark.p1
    @pytest.mark.parametrize(
        "name, expected_message",
        [
            ("valid_name", ""),
            pytest.param("a" * (SESSION_WITH_CHAT_NAME_LIMIT + 1), "", marks=pytest.mark.skip(reason="issues/")),
            pytest.param(1, "", marks=pytest.mark.skip(reason="issues/")),
            ("", "`name` can not be empty."),
            ("duplicated_name", ""),
            ("case insensitive", ""),
        ],
    )
    def test_name(self, add_chat_assistants, name, expected_message):
        _, _, chat_assistants = add_chat_assistants
        chat_assistant = chat_assistants[0]
        if name == "duplicated_name":
            chat_assistant.create_session(name=name)
        elif name == "case insensitive":
            chat_assistant.create_session(name=name.upper())

        if expected_message:
            with pytest.raises(Exception) as excinfo:
                chat_assistant.create_session(name=name)
            assert expected_message in str(excinfo.value)
        else:
            session = chat_assistant.create_session(name=name)
            assert session.name == name, str(session)
            assert session.chat_id == chat_assistant.id, str(session)

    @pytest.mark.p3
    def test_concurrent_create_session(self, add_chat_assistants):
        count = 1000
        _, _, chat_assistants = add_chat_assistants
        chat_assistant = chat_assistants[0]
        with ThreadPoolExecutor(max_workers=5) as executor:
            futures = [executor.submit(chat_assistant.create_session, name=f"session with chat assistant test {i}") for i in range(count)]
        responses = list(as_completed(futures))
        assert len(responses) == count, responses

        updated_sessions = chat_assistant.list_sessions(page_size=count * 2)
        assert len(updated_sessions) == count

    @pytest.mark.p3
    def test_add_session_to_deleted_chat_assistant(self, client, add_chat_assistants):
        _, _, chat_assistants = add_chat_assistants
        chat_assistant = chat_assistants[0]
        client.delete_chats(ids=[chat_assistant.id])
        with pytest.raises(Exception) as excinfo:
            chat_assistant.create_session(name="valid_name")
        assert "You do not own the assistant" in str(excinfo.value)
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from concurrent.futures import ThreadPoolExecutor, as_completed

import pytest
from common import batch_add_sessions_with_chat_assistant


class TestSessionWithChatAssistantDelete:
    @pytest.mark.parametrize(
        "payload",
        [
            pytest.param(lambda r: {"ids": ["invalid_id"] + r}, marks=pytest.mark.p3),
            pytest.param(lambda r: {"ids": r[:1] + ["invalid_id"] + r[1:5]}, marks=pytest.mark.p1),
            pytest.param(lambda r: {"ids": r + ["invalid_id"]}, marks=pytest.mark.p3),
        ],
    )
    def test_delete_partial_invalid_id(self, add_sessions_with_chat_assistant_func, payload):
        chat_assistant, sessions = add_sessions_with_chat_assistant_func
        if callable(payload):
            payload = payload([session.id for session in sessions])
        chat_assistant.delete_sessions(**payload)

        sessions = chat_assistant.list_sessions()
        assert len(sessions) == 0

    @pytest.mark.p3
    def test_repeated_deletion(self, add_sessions_with_chat_assistant_func):
        chat_assistant, sessions = add_sessions_with_chat_assistant_func
        session_ids = {"ids": [session.id for session in sessions]}
        chat_assistant.delete_sessions(**session_ids)

        with pytest.raises(Exception) as excinfo:
            chat_assistant.delete_sessions(**session_ids)
        assert "The chat doesn't own the session" in str(excinfo.value)

    @pytest.mark.p3
    def test_duplicate_deletion(self, add_sessions_with_chat_assistant_func):
        chat_assistant, sessions = add_sessions_with_chat_assistant_func
        session_ids = {"ids": [session.id for session in sessions] * 2}
        chat_assistant.delete_sessions(**session_ids)

        sessions = chat_assistant.list_sessions()
        assert len(sessions) == 0

    @pytest.mark.p3
    def test_concurrent_deletion(self, add_chat_assistants):
        count = 100
        _, _, chat_assistants = add_chat_assistants
        chat_assistant = chat_assistants[0]
        sessions = batch_add_sessions_with_chat_assistant(chat_assistant, count)

        with ThreadPoolExecutor(max_workers=5) as executor:
            futures = [executor.submit(chat_assistant.delete_sessions, ids=[sessions[i].id]) for i in range(count)]
        responses = list(as_completed(futures))
        assert len(responses) == count, responses

    @pytest.mark.p3
    def test_delete_1k(self, add_chat_assistants):
        count = 1_000
        _, _, chat_assistants = add_chat_assistants
        chat_assistant = chat_assistants[0]
        ssessions = batch_add_sessions_with_chat_assistant(chat_assistant, count)

        chat_assistant.delete_sessions(ids=[ssession.id for ssession in ssessions])
        sessions = chat_assistant.list_sessions()
        assert len(sessions) == 0

    @pytest.mark.parametrize(
        "payload, expected_message, remaining",
        [
            pytest.param(None, """TypeError("argument of type 'NoneType' is not iterable")""", 0, marks=pytest.mark.skip),
            pytest.param({"ids": ["invalid_id"]}, "The chat doesn't own the session invalid_id", 5, marks=pytest.mark.p3),
            pytest.param("not json", """AttributeError("'str' object has no attribute 'get'")""", 5, marks=pytest.mark.skip),
            pytest.param(lambda r: {"ids": r[:1]}, "", 4, marks=pytest.mark.p3),
            pytest.param(lambda r: {"ids": r}, "", 0, marks=pytest.mark.p1),
            pytest.param({"ids": []}, "", 0, marks=pytest.mark.p3),
        ],
    )
    def test_basic_scenarios(self, add_sessions_with_chat_assistant_func, payload, expected_message, remaining):
        chat_assistant, sessions = add_sessions_with_chat_assistant_func
        if callable(payload):
            payload = payload([session.id for session in sessions])

        if expected_message:
            with pytest.raises(Exception) as excinfo:
                chat_assistant.delete_sessions(**payload)
            assert expected_message in str(excinfo.value)
        else:
            chat_assistant.delete_sessions(**payload)
            sessions = chat_assistant.list_sessions()
            assert len(sessions) == remaining
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from concurrent.futures import ThreadPoolExecutor, as_completed


class TestSessionsWithChatAssistantList:
    @pytest.mark.p1
    @pytest.mark.parametrize(
        "params, expected_page_size, expected_message",
        [
            ({"page": None, "page_size": 2}, 0, "not instance of"),
            pytest.param({"page": 0, "page_size": 2}, 0, "ValueError('Search does not support negative slicing.')", marks=pytest.mark.skip),
            ({"page": 2, "page_size": 2}, 2, ""),
            ({"page": 3, "page_size": 2}, 1, ""),
            ({"page": "3", "page_size": 2}, 0, "not instance of"),
            pytest.param({"page": -1, "page_size": 2}, 0, "ValueError('Search does not support negative slicing.')", marks=pytest.mark.skip),
            pytest.param({"page": "a", "page_size": 2}, 0, """ValueError("invalid literal for int() with base 10: 'a'")""", marks=pytest.mark.skip),
        ],
    )
    def test_page(self, add_sessions_with_chat_assistant, params, expected_page_size, expected_message):
        chat_assistant, _ = add_sessions_with_chat_assistant
        if expected_message:
            with pytest.raises(Exception) as excinfo:
                chat_assistant.list_sessions(**params)
            assert expected_message in str(excinfo.value)
        else:
            sessions = chat_assistant.list_sessions(**params)
            assert len(sessions) == expected_page_size

    @pytest.mark.p1
    @pytest.mark.parametrize(
        "params, expected_page_size, expected_message",
        [
            ({"page_size": None}, 0, "not instance of"),
            ({"page_size": 0}, 0, ""),
            ({"page_size": 1}, 1, ""),
            ({"page_size": 6}, 5, ""),
            ({"page_size": "1"}, 0, "not instance of"),
            pytest.param({"page_size": -1}, 5, "", marks=pytest.mark.skip),
            pytest.param({"page_size": "a"}, 0, """ValueError("invalid literal for int() with base 10: 'a'")""", marks=pytest.mark.skip),
        ],
    )
    def test_page_size(self, add_sessions_with_chat_assistant, params, expected_page_size, expected_message):
        chat_assistant, _ = add_sessions_with_chat_assistant
        if expected_message:
            with pytest.raises(Exception) as excinfo:
                chat_assistant.list_sessions(**params)
            assert expected_message in str(excinfo.value)
        else:
            sessions = chat_assistant.list_sessions(**params)
            assert len(sessions) == expected_page_size

    @pytest.mark.p3
    @pytest.mark.parametrize(
        "params, expected_message",
        [
            ({"orderby": None}, "not instance of"),
            ({"orderby": "create_time"}, ""),
            ({"orderby": "update_time"}, ""),
            ({"orderby": "name", "desc": "False"}, "not instance of"),
            pytest.param({"orderby": "unknown"}, "orderby should be create_time or update_time", marks=pytest.mark.skip(reason="issues/")),
        ],
    )
    def test_orderby(self, add_sessions_with_chat_assistant, params, expected_message):
        chat_assistant, _ = add_sessions_with_chat_assistant
        if expected_message:
            with pytest.raises(Exception) as excinfo:
                chat_assistant.list_sessions(**params)
            assert expected_message in str(excinfo.value)
        else:
            chat_assistant.list_sessions(**params)

    @pytest.mark.p3
    @pytest.mark.parametrize(
        "params, expected_message",
        [
            ({"desc": None}, "not instance of"),
            ({"desc": "true"}, "not instance of"),
            ({"desc": "True"}, "not instance of"),
            ({"desc": True}, ""),
            ({"desc": "false"}, "not instance of"),
            ({"desc": "False"}, "not instance of"),
            ({"desc": False}, ""),
            ({"desc": "False", "orderby": "update_time"}, "not instance of"),
            pytest.param({"desc": "unknown"}, "desc should be true or false", marks=pytest.mark.skip(reason="issues/")),
        ],
    )
    def test_desc(self, add_sessions_with_chat_assistant, params, expected_message):
        chat_assistant, _ = add_sessions_with_chat_assistant
        if expected_message:
            with pytest.raises(Exception) as excinfo:
                chat_assistant.list_sessions(**params)
            assert expected_message in str(excinfo.value)
        else:
            chat_assistant.list_sessions(**params)

    @pytest.mark.p1
    @pytest.mark.parametrize(
        "params, expected_num, expected_message",
        [
            ({"name": None}, 0, "not instance of"),
            ({"name": ""}, 5, ""),
            ({"name": "session_with_chat_assistant_1"}, 1, ""),
            ({"name": "unknown"}, 0, ""),
        ],
    )
    def test_name(self, add_sessions_with_chat_assistant, params, expected_num, expected_message):
        chat_assistant, _ = add_sessions_with_chat_assistant
        if expected_message:
            with pytest.raises(Exception) as excinfo:
                chat_assistant.list_sessions(**params)
            assert expected_message in str(excinfo.value)
        else:
            sessions = chat_assistant.list_sessions(**params)
            if params["name"] != "session_with_chat_assistant_1":
                assert len(sessions) == expected_num
            else:
                assert sessions[0].name == params["name"]

    @pytest.mark.p1
    @pytest.mark.parametrize(
        "session_id, expected_num, expected_message",
        [
            (None, 0, "not instance of"),
            ("", 5, ""),
            (lambda r: r[0], 1, ""),
            ("unknown", 0, ""),
        ],
    )
    def test_id(self, add_sessions_with_chat_assistant, session_id, expected_num, expected_message):
        chat_assistant, sessions = add_sessions_with_chat_assistant
        if callable(session_id):
            params = {"id": session_id([s.id for s in sessions])}
        else:
            params = {"id": session_id}

        if expected_message:
            with pytest.raises(Exception) as excinfo:
                chat_assistant.list_sessions(**params)
            assert expected_message in str(excinfo.value)
        else:
            list_sessions = chat_assistant.list_sessions(**params)
            if "id" in params and params["id"] != sessions[0].id:
                assert len(list_sessions) == expected_num
            else:
                assert list_sessions[0].id == params["id"]

    @pytest.mark.p3
    @pytest.mark.parametrize(
        "session_id, name, expected_num, expected_message",
        [
            (lambda r: r[0], "session_with_chat_assistant_0", 1, ""),
            (lambda r: r[0], "session_with_chat_assistant_100", 0, ""),
            (lambda r: r[0], "unknown", 0, ""),
            ("id", "session_with_chat_assistant_0", 0, ""),
        ],
    )
    def test_name_and_id(self, add_sessions_with_chat_assistant, session_id, name, expected_num, expected_message):
        chat_assistant, sessions = add_sessions_with_chat_assistant
        if callable(session_id):
            params = {"id": session_id([s.id for s in sessions]), "name": name}
        else:
            params = {"id": session_id, "name": name}

        if expected_message:
            with pytest.raises(Exception) as excinfo:
                chat_assistant.list_sessions(**params)
            assert expected_message in str(excinfo.value)
        else:
            list_sessions = chat_assistant.list_sessions(**params)
            assert len(list_sessions) == expected_num

    @pytest.mark.p3
    def test_concurrent_list(self, add_sessions_with_chat_assistant):
        count = 100
        chat_assistant, _ = add_sessions_with_chat_assistant
        with ThreadPoolExecutor(max_workers=5) as executor:
            futures = [executor.submit(chat_assistant.list_sessions) for i in range(count)]
        responses = list(as_completed(futures))
        assert len(responses) == count, responses

    @pytest.mark.p3
    def test_list_chats_after_deleting_associated_chat_assistant(self, client, add_sessions_with_chat_assistant):
        chat_assistant, _ = add_sessions_with_chat_assistant
        client.delete_chats(ids=[chat_assistant.id])
        with pytest.raises(Exception) as excinfo:
            chat_assistant.list_sessions()
        assert "You don't own the assistant" in str(excinfo.value)
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from concurrent.futures import ThreadPoolExecutor, as_completed
from random import randint

import pytest
from configs import SESSION_WITH_CHAT_NAME_LIMIT


class TestSessionWithChatAssistantUpdate:
    @pytest.mark.parametrize(
        "payload, expected_message",
        [
            pytest.param({"name": "valid_name"}, "", marks=pytest.mark.p1),
            pytest.param({"name": "a" * (SESSION_WITH_CHAT_NAME_LIMIT + 1)}, "", marks=pytest.mark.skip(reason="issues/")),
            pytest.param({"name": 1}, "", marks=pytest.mark.skip(reason="issues/")),
            pytest.param({"name": ""}, "`name` can not be empty.", marks=pytest.mark.p3),
            pytest.param({"name": "duplicated_name"}, "", marks=pytest.mark.p3),
            pytest.param({"name": "case insensitive"}, "", marks=pytest.mark.p3),
        ],
    )
    def test_name(self, add_sessions_with_chat_assistant_func, payload, expected_message):
        chat_assistant, sessions = add_sessions_with_chat_assistant_func
        session = sessions[0]
        if payload["name"] == "duplicated_name":
            session.update(payload)
        elif payload["name"] == "case insensitive":
            session.update({"name": payload["name"].upper()})

        if expected_message:
            with pytest.raises(Exception) as excinfo:
                session.update(payload)
            assert expected_message in str(excinfo.value)
        else:
            session.update(payload)
            updated_session = chat_assistant.list_sessions(id=session.id)[0]
            assert updated_session.name == payload["name"]

    @pytest.mark.p3
    def test_repeated_update_session(self, add_sessions_with_chat_assistant_func):
        _, sessions = add_sessions_with_chat_assistant_func
        session = sessions[0]
        session.update({"name": "valid_name_1"})
        session.update({"name": "valid_name_2"})

    @pytest.mark.p3
    @pytest.mark.parametrize(
        "payload, expected_message",
        [
            pytest.param({"unknown_key": "unknown_value"}, "ValueError", marks=pytest.mark.skip),
            ({}, ""),
            pytest.param(None, "TypeError", marks=pytest.mark.skip),
        ],
    )
    def test_invalid_params(self, add_sessions_with_chat_assistant_func, payload, expected_message):
        _, sessions = add_sessions_with_chat_assistant_func
        session = sessions[0]
        if expected_message:
            with pytest.raises(Exception) as excinfo:
                session.update(payload)
            assert expected_message in str(excinfo.value)
        else:
            session.update(payload)

    @pytest.mark.p3
    def test_concurrent_update_session(self, add_sessions_with_chat_assistant_func):
        count = 50
        _, sessions = add_sessions_with_chat_assistant_func
        with ThreadPoolExecutor(max_workers=5) as executor:
            futures = [executor.submit(sessions[randint(0, 4)].update, {"name": f"update session test {i}"}) for i in range(count)]
        responses = list(as_completed(futures))
        assert len(responses) == count, responses

    @pytest.mark.p3
    def test_update_session_to_deleted_chat_assistant(self, client, add_sessions_with_chat_assistant_func):
        chat_assistant, sessions = add_sessions_with_chat_assistant_func
        client.delete_chats(ids=[chat_assistant.id])
        with pytest.raises(Exception) as excinfo:
            sessions[0].update({"name": "valid_name"})
        assert "You do not own the session" in str(excinfo.value)