You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

hooks.ts 3.2KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103
  1. import { useTranslation } from 'react-i18next'
  2. import { AddDocumentsStep } from './types'
  3. import type { DataSourceOption, Datasource } from '@/app/components/rag-pipeline/components/panel/test-run/types'
  4. import { useMemo } from 'react'
  5. import { BlockEnum, type Node } from '@/app/components/workflow/types'
  6. import type { DataSourceNodeType } from '@/app/components/workflow/nodes/data-source/types'
  7. import { DataSourceType } from '@/models/datasets'
  8. import { DataSourceProvider } from '@/models/common'
  9. export const useAddDocumentsSteps = () => {
  10. const { t } = useTranslation()
  11. const steps = [
  12. {
  13. label: t('datasetPipeline.addDocuments.steps.chooseDatasource'),
  14. value: AddDocumentsStep.dataSource,
  15. },
  16. {
  17. label: t('datasetPipeline.addDocuments.steps.ProcessDocuments'),
  18. value: AddDocumentsStep.processDocuments,
  19. },
  20. {
  21. label: t('datasetPipeline.addDocuments.steps.ProcessingDocuments'),
  22. value: AddDocumentsStep.processingDocuments,
  23. },
  24. ]
  25. return steps
  26. }
  27. export const useDatasourceOptions = (pipelineNodes: Node<DataSourceNodeType>[]) => {
  28. const { t } = useTranslation()
  29. const datasources: Datasource[] = useMemo(() => {
  30. const datasourceNodes = pipelineNodes.filter(node => node.data.type === BlockEnum.DataSource)
  31. return datasourceNodes.map((node) => {
  32. let type: DataSourceType | DataSourceProvider = DataSourceType.FILE
  33. switch (node.data.tool_name) {
  34. case 'file_upload':
  35. type = DataSourceType.FILE
  36. break
  37. case 'search_notion':
  38. type = DataSourceType.NOTION
  39. break
  40. case 'firecrawl':
  41. type = DataSourceProvider.fireCrawl
  42. break
  43. case 'jina_reader':
  44. type = DataSourceProvider.jinaReader
  45. break
  46. case 'water_crawl':
  47. type = DataSourceProvider.waterCrawl
  48. break
  49. }
  50. return {
  51. nodeId: node.id,
  52. type,
  53. variables: node.data.variables,
  54. }
  55. })
  56. }, [pipelineNodes])
  57. const options = useMemo(() => {
  58. const options: DataSourceOption[] = []
  59. datasources.forEach((source) => {
  60. if (source.type === DataSourceType.FILE) {
  61. options.push({
  62. label: t('datasetPipeline.testRun.dataSource.localFiles'),
  63. value: source.nodeId,
  64. type: DataSourceType.FILE,
  65. })
  66. }
  67. if (source.type === DataSourceType.NOTION) {
  68. options.push({
  69. label: 'Notion',
  70. value: source.nodeId,
  71. type: DataSourceType.NOTION,
  72. })
  73. }
  74. if (source.type === DataSourceProvider.fireCrawl) {
  75. options.push({
  76. label: 'Firecrawl',
  77. value: source.nodeId,
  78. type: DataSourceProvider.fireCrawl,
  79. })
  80. }
  81. if (source.type === DataSourceProvider.jinaReader) {
  82. options.push({
  83. label: 'Jina Reader',
  84. value: source.nodeId,
  85. type: DataSourceProvider.jinaReader,
  86. })
  87. }
  88. if (source.type === DataSourceProvider.waterCrawl) {
  89. options.push({
  90. label: 'Water Crawl',
  91. value: source.nodeId,
  92. type: DataSourceProvider.waterCrawl,
  93. })
  94. }
  95. })
  96. return options
  97. }, [datasources, t])
  98. return { datasources, options }
  99. }