
json_parser.py

# -*- coding: utf-8 -*-
# Adapted, with minor modifications, from:
# https://github.com/langchain-ai/langchain/blob/master/libs/text-splitters/langchain_text_splitters/json.py
import json
from typing import Any

from rag.nlp import find_codec


class RAGFlowJsonParser:
    def __init__(
        self, max_chunk_size: int = 2000, min_chunk_size: int | None = None
    ):
        super().__init__()
        # Note: the effective size ceiling is twice the configured max_chunk_size.
        self.max_chunk_size = max_chunk_size * 2
        self.min_chunk_size = (
            min_chunk_size
            if min_chunk_size is not None
            else max(max_chunk_size - 200, 50)
        )

    def __call__(self, binary):
        """Decode raw bytes, parse them as JSON, and return the non-empty
        chunks serialized as JSON strings."""
        encoding = find_codec(binary)
        txt = binary.decode(encoding, errors="ignore")
        json_data = json.loads(txt)
        chunks = self.split_json(json_data, True)
        sections = [json.dumps(line, ensure_ascii=False) for line in chunks if line]
        return sections

    @staticmethod
    def _json_size(data: dict) -> int:
        """Calculate the size of the serialized JSON object."""
        return len(json.dumps(data, ensure_ascii=False))

    @staticmethod
    def _set_nested_dict(d: dict, path: list[str], value: Any) -> None:
        """Set a value in a nested dictionary based on the given path."""
        for key in path[:-1]:
            d = d.setdefault(key, {})
        d[path[-1]] = value

    def _list_to_dict_preprocessing(self, data: Any) -> Any:
        if isinstance(data, dict):
            # Process each key-value pair in the dictionary
            return {k: self._list_to_dict_preprocessing(v) for k, v in data.items()}
        elif isinstance(data, list):
            # Convert the list to a dictionary with index-based keys
            return {
                str(i): self._list_to_dict_preprocessing(item)
                for i, item in enumerate(data)
            }
        else:
            # Base case: the item is neither a dict nor a list, so return it unchanged
            return data

    def _json_split(
        self,
        data,
        current_path: list[str] | None,
        chunks: list[dict] | None,
    ) -> list[dict]:
        """Split JSON into maximum-size dictionaries while preserving structure."""
        current_path = current_path or []
        chunks = chunks or [{}]
        if isinstance(data, dict):
            for key, value in data.items():
                new_path = current_path + [key]
                chunk_size = self._json_size(chunks[-1])
                size = self._json_size({key: value})
                remaining = self.max_chunk_size - chunk_size
                if size < remaining:
                    # Add item to current chunk
                    self._set_nested_dict(chunks[-1], new_path, value)
                else:
                    if chunk_size >= self.min_chunk_size:
                        # Chunk is big enough, start a new chunk
                        chunks.append({})
                    # Recurse into the value that did not fit
                    self._json_split(value, new_path, chunks)
        else:
            # Handle a single (non-dict) item
            self._set_nested_dict(chunks[-1], current_path, data)
        return chunks

    def split_json(
        self,
        json_data,
        convert_lists: bool = False,
    ) -> list[dict]:
        """Split JSON into a list of JSON chunks."""
        if convert_lists:
            preprocessed_data = self._list_to_dict_preprocessing(json_data)
            chunks = self._json_split(preprocessed_data, None, None)
        else:
            chunks = self._json_split(json_data, None, None)

        # Remove the last chunk if it's empty
        if not chunks[-1]:
            chunks.pop()
        return chunks

    def split_text(
        self,
        json_data: dict[str, Any],
        convert_lists: bool = False,
        ensure_ascii: bool = True,
    ) -> list[str]:
        """Split JSON into a list of JSON-formatted strings."""
        chunks = self.split_json(json_data=json_data, convert_lists=convert_lists)
        # Convert each chunk back to a string
        return [json.dumps(chunk, ensure_ascii=ensure_ascii) for chunk in chunks]
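
# --- Usage sketch (illustrative; not part of the original module) -------------
# A minimal example of how the parser might be driven, assuming a local file
# named "data.json" exists; the filename and the sample document are
# hypothetical and only meant to exercise __call__ and split_text.
if __name__ == "__main__":
    parser = RAGFlowJsonParser(max_chunk_size=512)

    # __call__ takes raw bytes: it detects the encoding, parses the JSON,
    # splits it, and returns the non-empty chunks as JSON strings.
    with open("data.json", "rb") as f:
        sections = parser(f.read())
    print(f"{len(sections)} chunk(s) from data.json")

    # split_text works on an already-parsed structure; convert_lists=True
    # rewrites lists as dicts keyed by index so they can be split like objects.
    doc = {"title": "demo", "items": [{"id": i, "text": "x" * 100} for i in range(20)]}
    for s in parser.split_text(doc, convert_lists=True, ensure_ascii=False):
        print(len(s), s[:60])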