import logging
import boto3
import os
from botocore.exceptions import ClientError
from botocore.client import Config
import time
from io import BytesIO
from rag.utils import singleton

logger = logging.getLogger(__name__)


@singleton
class RAGFlowS3(object):
    """S3-compatible storage wrapper; connection settings are read from environment variables."""

    def __init__(self):
        self.conn = None
        self.endpoint = os.getenv('ENDPOINT', None)
        self.access_key = os.getenv('ACCESS_KEY', None)
        self.secret_key = os.getenv('SECRET_KEY', None)
        self.region = os.getenv('REGION', None)
        self.__open__()

    def __open__(self):
        # Drop any existing client before (re)connecting.
        try:
            if self.conn:
                self.__close__()
        except Exception:
            pass

        try:
            config = Config(
                s3={
                    'addressing_style': 'virtual'
                }
            )
            self.conn = boto3.client(
                's3',
                endpoint_url=self.endpoint,
                region_name=self.region,
                aws_access_key_id=self.access_key,
                aws_secret_access_key=self.secret_key,
                config=config
            )
        except Exception:
            logger.exception("Fail to connect %s" % self.endpoint)

    def __close__(self):
        del self.conn
        self.conn = None

    def bucket_exists(self, bucket):
        try:
            logger.debug(f"head_bucket bucketname {bucket}")
            self.conn.head_bucket(Bucket=bucket)
            exists = True
        except ClientError:
            logger.exception(f"head_bucket error {bucket}")
            exists = False
        return exists

    def health(self):
        bucket, fnm, binary = "txtxtxtxt1", "txtxtxtxt1", b"_t@@@1"
        if not self.bucket_exists(bucket):
            self.conn.create_bucket(Bucket=bucket)
            logger.debug(f"create bucket {bucket} ********")
        r = self.conn.upload_fileobj(BytesIO(binary), bucket, fnm)
        return r

    # Placeholder implementations: always return empty results.
    def get_properties(self, bucket, key):
        return {}

    def list(self, bucket, dir, recursive=True):
        return []

    def put(self, bucket, fnm, binary):
        logger.debug(f"bucket name {bucket}; filename :{fnm}:")
        # Single attempt; on failure the connection is rebuilt for the next call.
        for _ in range(1):
            try:
                if not self.bucket_exists(bucket):
                    self.conn.create_bucket(Bucket=bucket)
                    logger.info(f"create bucket {bucket} ********")
                r = self.conn.upload_fileobj(BytesIO(binary), bucket, fnm)
                return r
            except Exception:
                logger.exception(f"Fail put {bucket}/{fnm}")
                self.__open__()
                time.sleep(1)

    def rm(self, bucket, fnm):
        try:
            self.conn.delete_object(Bucket=bucket, Key=fnm)
        except Exception:
            logger.exception(f"Fail rm {bucket}/{fnm}")

    def get(self, bucket, fnm):
        for _ in range(1):
            try:
                r = self.conn.get_object(Bucket=bucket, Key=fnm)
                object_data = r['Body'].read()
                return object_data
            except Exception:
                logger.exception(f"fail get {bucket}/{fnm}")
                self.__open__()
                time.sleep(1)
        return

    def obj_exist(self, bucket, fnm):
        try:
            if self.conn.head_object(Bucket=bucket, Key=fnm):
                return True
        except ClientError as e:
            if e.response['Error']['Code'] == '404':
                return False
            else:
                raise

    def get_presigned_url(self, bucket, fnm, expires):
        for _ in range(10):
            try:
                r = self.conn.generate_presigned_url(
                    'get_object',
                    Params={'Bucket': bucket, 'Key': fnm},
                    ExpiresIn=expires)
                return r
            except Exception:
                logger.exception(f"fail get url {bucket}/{fnm}")
                self.__open__()
                time.sleep(1)
        return
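
Below is a minimal usage sketch, not part of the module above. It assumes the ENDPOINT, ACCESS_KEY, SECRET_KEY and REGION environment variables point at a reachable S3-compatible service, that the module lives at rag/utils/s3_conn.py, and that the bucket name "ragflow-demo" is purely illustrative.

import os

# Hypothetical connection settings; replace with values for your own S3-compatible endpoint.
os.environ.setdefault('ENDPOINT', 'http://127.0.0.1:9000')
os.environ.setdefault('ACCESS_KEY', 'minioadmin')
os.environ.setdefault('SECRET_KEY', 'minioadmin')
os.environ.setdefault('REGION', 'us-east-1')

from rag.utils.s3_conn import RAGFlowS3  # assumed module path for the class above

s3 = RAGFlowS3()  # @singleton: repeated constructions return the same instance
s3.put("ragflow-demo", "hello.txt", b"hello world")      # upload raw bytes under a key
data = s3.get("ragflow-demo", "hello.txt")               # read the object back as bytes
url = s3.get_presigned_url("ragflow-demo", "hello.txt", expires=3600)  # 1-hour download link
print(data, url)

Because the class is decorated with @singleton, every caller shares one boto3 client; put() and get() rebuild the connection via __open__() after a failure, so transient errors only cost the current call.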