Ви не можете вибрати більше 25 тем Теми мають розпочинатися з літери або цифри, можуть містити дефіси (-) і не повинні перевищувати 35 символів.

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879
#
# Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import re
from abc import ABC

import pandas as pd
import requests
from bs4 import BeautifulSoup

from agent.component.base import ComponentBase, ComponentParamBase
  23. class BaiduParam(ComponentParamBase):
  24. """
  25. Define the Baidu component parameters.
  26. """
  27. def __init__(self):
  28. super().__init__()
  29. self.top_n = 10
  30. def check(self):
  31. self.check_positive_integer(self.top_n, "Top N")
  32. class Baidu(ComponentBase, ABC):
  33. component_name = "Baidu"
  34. def _run(self, history, **kwargs):
  35. ans = self.get_input()
  36. ans = " - ".join(ans["content"]) if "content" in ans else ""
  37. if not ans:
  38. return Baidu.be_output("")
  39. try:
  40. url = 'https://www.baidu.com/s?wd=' + ans + '&rn=' + str(self._param.top_n)
  41. headers = {
  42. 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
  43. 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
  44. 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
  45. 'Connection': 'keep-alive',
  46. }
  47. response = requests.get(url=url, headers=headers)
  48. # check if request success
  49. if response.status_code == 200:
  50. soup = BeautifulSoup(response.text, 'html.parser')
  51. url_res = []
  52. title_res = []
  53. body_res = []
  54. for item in soup.select('.result.c-container'):
  55. # extract title
  56. title_res.append(item.select_one('h3 a').get_text(strip=True))
  57. url_res.append(item.select_one('h3 a')['href'])
  58. body_res.append(item.select_one('.c-abstract').get_text(strip=True) if item.select_one('.c-abstract') else '')
  59. baidu_res = [{"content": re.sub('<em>|</em>', '', '<a href="' + url + '">' + title + '</a> ' + body)} for
  60. url, title, body in zip(url_res, title_res, body_res)]
  61. del body_res, url_res, title_res
  62. except Exception as e:
  63. return Baidu.be_output("**ERROR**: " + str(e))
  64. if not baidu_res:
  65. return Baidu.be_output("")
  66. df = pd.DataFrame(baidu_res)
  67. logging.debug(f"df: {str(df)}")
  68. return df