
settings.py

#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import logging

from api.utils import get_base_config, decrypt_database_config
from api.utils.file_utils import get_project_base_directory
# Server
RAG_CONF_PATH = os.path.join(get_project_base_directory(), "conf")

# Get storage type and document engine from system environment variables
STORAGE_IMPL_TYPE = os.getenv('STORAGE_IMPL', 'MINIO')
DOC_ENGINE = os.getenv('DOC_ENGINE', 'elasticsearch')

ES = {}
INFINITY = {}
AZURE = {}
S3 = {}
MINIO = {}
OSS = {}
# Initialize only the configuration selected by the environment variables above,
# so that missing configuration for an unused backend does not cause initialization errors.
if DOC_ENGINE == 'elasticsearch':
    ES = get_base_config("es", {})
elif DOC_ENGINE == 'infinity':
    INFINITY = get_base_config("infinity", {"uri": "infinity:23817"})

if STORAGE_IMPL_TYPE in ['AZURE_SPN', 'AZURE_SAS']:
    AZURE = get_base_config("azure", {})
elif STORAGE_IMPL_TYPE == 'AWS_S3':
    S3 = get_base_config("s3", {})
elif STORAGE_IMPL_TYPE == 'MINIO':
    MINIO = decrypt_database_config(name="minio")
elif STORAGE_IMPL_TYPE == 'OSS':
    OSS = get_base_config("oss", {})
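
# Redis settings are optional: fall back to an empty dict when the config is missing or cannot be decrypted.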
try:
    REDIS = decrypt_database_config(name="redis")
except Exception:
    REDIS = {}
DOC_MAXIMUM_SIZE = int(os.environ.get("MAX_CONTENT_LENGTH", 128 * 1024 * 1024))

SVR_QUEUE_NAME = "rag_flow_svr_queue"
SVR_CONSUMER_GROUP_NAME = "rag_flow_svr_task_broker"
PAGERANK_FLD = "pagerank_fea"
TAG_FLD = "tag_feas"
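
# Number of CUDA devices usable for parallel processing; left as None when torch is not installed.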
PARALLEL_DEVICES = None
try:
    import torch.cuda
    PARALLEL_DEVICES = torch.cuda.device_count()
    logging.info(f"found {PARALLEL_DEVICES} gpus")
except Exception:
    logging.info("can't import package 'torch'")


def print_rag_settings():
    logging.info(f"MAX_CONTENT_LENGTH: {DOC_MAXIMUM_SIZE}")
    logging.info(f"MAX_FILE_COUNT_PER_USER: {int(os.environ.get('MAX_FILE_NUM_PER_USER', 0))}")
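

# Task queues are named by priority: priority 0 uses the base queue name,
# while higher priorities get the priority number appended as a suffix.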
def get_svr_queue_name(priority: int) -> str:
    if priority == 0:
        return SVR_QUEUE_NAME
    return f"{SVR_QUEUE_NAME}_{priority}"


def get_svr_queue_names():
    return [get_svr_queue_name(priority) for priority in [1, 0]]
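
Below is a minimal usage sketch of how these settings are driven by environment variables. It assumes the module path `rag.settings` and that the `api.utils` config helpers and their conf files are in place; the environment variable names come from the file above, while the chosen values are only illustrative.

import os

# Select the Infinity document engine and AWS S3 storage before importing the module,
# so only the matching config sections are loaded at import time.
os.environ["DOC_ENGINE"] = "infinity"
os.environ["STORAGE_IMPL"] = "AWS_S3"
os.environ["MAX_CONTENT_LENGTH"] = str(256 * 1024 * 1024)  # raise the document size limit to 256 MB

from rag import settings  # assumed module path for the file above

settings.print_rag_settings()           # logs MAX_CONTENT_LENGTH and MAX_FILE_COUNT_PER_USER
print(settings.get_svr_queue_names())   # ['rag_flow_svr_queue_1', 'rag_flow_svr_queue']
print(settings.get_svr_queue_name(2))   # 'rag_flow_svr_queue_2'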