# commands.py
import base64
import json
import logging
import secrets
from typing import Any

import click
import sqlalchemy as sa
from flask import current_app
from pydantic import TypeAdapter
from sqlalchemy import select
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import sessionmaker

from configs import dify_config
from constants.languages import languages
from core.helper import encrypter
from core.plugin.impl.plugin import PluginInstaller
from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.index_processor.constant.built_in_field import BuiltInField
from core.rag.models.document import Document
from core.tools.entities.tool_entities import CredentialType
from core.tools.utils.system_oauth_encryption import encrypt_system_oauth_params
from events.app_event import app_was_created
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from extensions.ext_storage import storage
from extensions.storage.opendal_storage import OpenDALStorage
from extensions.storage.storage_type import StorageType
from libs.helper import email as email_validate
from libs.password import hash_password, password_pattern, valid_password
from libs.rsa import generate_key_pair
from models import Tenant
from models.dataset import Dataset, DatasetCollectionBinding, DatasetMetadata, DatasetMetadataBinding, DocumentSegment
from models.dataset import Document as DatasetDocument
from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation, UploadFile
from models.oauth import DatasourceOauthParamConfig, DatasourceProvider
from models.provider import Provider, ProviderModel
from models.provider_ids import DatasourceProviderID, ToolProviderID
from models.source import DataSourceApiKeyAuthBinding, DataSourceOauthBinding
from models.tools import ToolOAuthSystemClient
from services.account_service import AccountService, RegisterService, TenantService
from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpiredLogs
from services.plugin.data_migration import PluginDataMigration
from services.plugin.plugin_migration import PluginMigration
from services.plugin.plugin_service import PluginService
from tasks.remove_app_and_related_data_task import delete_draft_variables_batch

logger = logging.getLogger(__name__)

@click.command("reset-password", help="Reset the account password.")
@click.option("--email", prompt=True, help="Account email to reset password for")
@click.option("--new-password", prompt=True, help="New password")
@click.option("--password-confirm", prompt=True, help="Confirm new password")
def reset_password(email, new_password, password_confirm):
    """
    Reset password of the owner account.
    Only available in SELF_HOSTED mode.
    """
    if str(new_password).strip() != str(password_confirm).strip():
        click.echo(click.style("Passwords do not match.", fg="red"))
        return

    with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
        account = session.query(Account).where(Account.email == email).one_or_none()
        if not account:
            click.echo(click.style(f"Account not found for email: {email}", fg="red"))
            return

        try:
            valid_password(new_password)
        except Exception:
            click.echo(click.style(f"Invalid password. Must match {password_pattern}", fg="red"))
            return

        # generate password salt
        salt = secrets.token_bytes(16)
        base64_salt = base64.b64encode(salt).decode()

        # encrypt password with salt
        password_hashed = hash_password(new_password, salt)
        base64_password_hashed = base64.b64encode(password_hashed).decode()
        account.password = base64_password_hashed
        account.password_salt = base64_salt

    AccountService.reset_login_error_rate_limit(email)
    click.echo(click.style("Password reset successfully.", fg="green"))
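
# Example invocation, assuming Dify's standard Flask CLI entrypoint (run from
# the api/ directory); email and passwords below are illustrative values:
#
#   flask reset-password --email admin@example.com \
#       --new-password 'NewPass123' --password-confirm 'NewPass123'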

@click.command("reset-email", help="Reset the account email.")
@click.option("--email", prompt=True, help="Current account email")
@click.option("--new-email", prompt=True, help="New email")
@click.option("--email-confirm", prompt=True, help="Confirm new email")
def reset_email(email, new_email, email_confirm):
    """
    Replace an account email.
    """
    if str(new_email).strip() != str(email_confirm).strip():
        click.echo(click.style("New emails do not match.", fg="red"))
        return

    with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
        account = session.query(Account).where(Account.email == email).one_or_none()
        if not account:
            click.echo(click.style(f"Account not found for email: {email}", fg="red"))
            return

        try:
            email_validate(new_email)
        except Exception:
            click.echo(click.style(f"Invalid email: {new_email}", fg="red"))
            return

        account.email = new_email

    click.echo(click.style("Email updated successfully.", fg="green"))
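
# Example invocation (illustrative addresses):
#
#   flask reset-email --email old@example.com \
#       --new-email new@example.com --email-confirm new@example.com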

@click.command(
    "reset-encrypt-key-pair",
    help="Reset the asymmetric key pair used to encrypt LLM credentials for each workspace. "
    "After the reset, all LLM credentials become invalid and must be re-entered. "
    "Only supported in SELF_HOSTED mode.",
)
@click.confirmation_option(
    prompt=click.style(
        "Are you sure you want to reset encrypt key pair? This operation cannot be rolled back!", fg="red"
    )
)
def reset_encrypt_key_pair():
    """
    Reset the asymmetric key pair used to encrypt LLM credentials for each workspace.
    After the reset, all LLM credentials become invalid and must be re-entered.
    Only supported in SELF_HOSTED mode.
    """
    if dify_config.EDITION != "SELF_HOSTED":
        click.echo(click.style("This command is only for SELF_HOSTED installations.", fg="red"))
        return

    with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
        tenants = session.query(Tenant).all()
        for tenant in tenants:
            if not tenant:
                click.echo(click.style("No workspaces found. Run /install first.", fg="red"))
                return

            tenant.encrypt_public_key = generate_key_pair(tenant.id)

            session.query(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
            session.query(ProviderModel).where(ProviderModel.tenant_id == tenant.id).delete()

            click.echo(
                click.style(
                    f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.",
                    fg="green",
                )
            )
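
# Example invocation; the confirmation prompt can be bypassed with --yes, the
# standard flag added by click's confirmation_option:
#
#   flask reset-encrypt-key-pair --yes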

@click.command("vdb-migrate", help="Migrate vector db.")
@click.option("--scope", default="all", prompt=False, help="The scope of the vector database to migrate; default is all.")
def vdb_migrate(scope: str):
    if scope in {"knowledge", "all"}:
        migrate_knowledge_vector_database()
    if scope in {"annotation", "all"}:
        migrate_annotation_vector_database()
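
# Example invocations; scope may be "knowledge", "annotation", or "all":
#
#   flask vdb-migrate                      # migrate both knowledge and annotation data
#   flask vdb-migrate --scope knowledge    # migrate dataset (knowledge) indexes only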

def migrate_annotation_vector_database():
    """
    Migrate annotation data to the target vector database.
    """
    click.echo(click.style("Starting annotation data migration.", fg="green"))
    create_count = 0
    skipped_count = 0
    total_count = 0
    page = 1
    while True:
        try:
            # get apps info
            per_page = 50
            with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
                apps = (
                    session.query(App)
                    .where(App.status == "normal")
                    .order_by(App.created_at.desc())
                    .limit(per_page)
                    .offset((page - 1) * per_page)
                    .all()
                )
            if not apps:
                break
        except SQLAlchemyError:
            raise

        page += 1
        for app in apps:
            total_count = total_count + 1
            click.echo(
                f"Processing the {total_count} app {app.id}. " + f"{create_count} created, {skipped_count} skipped."
            )
            try:
                click.echo(f"Creating app annotation index: {app.id}")
                with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
                    app_annotation_setting = (
                        session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first()
                    )
                    if not app_annotation_setting:
                        skipped_count = skipped_count + 1
                        click.echo(f"App annotation setting disabled: {app.id}")
                        continue
                    # get dataset_collection_binding info
                    dataset_collection_binding = (
                        session.query(DatasetCollectionBinding)
                        .where(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
                        .first()
                    )
                    if not dataset_collection_binding:
                        click.echo(f"App annotation collection binding not found: {app.id}")
                        continue
                    annotations = session.scalars(
                        select(MessageAnnotation).where(MessageAnnotation.app_id == app.id)
                    ).all()

                dataset = Dataset(
                    id=app.id,
                    tenant_id=app.tenant_id,
                    indexing_technique="high_quality",
                    embedding_model_provider=dataset_collection_binding.provider_name,
                    embedding_model=dataset_collection_binding.model_name,
                    collection_binding_id=dataset_collection_binding.id,
                )
                documents = []
                if annotations:
                    for annotation in annotations:
                        document = Document(
                            page_content=annotation.question,
                            metadata={"annotation_id": annotation.id, "app_id": app.id, "doc_id": annotation.id},
                        )
                        documents.append(document)

                vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
                click.echo(f"Migrating annotations for app: {app.id}.")

                try:
                    vector.delete()
                    click.echo(click.style(f"Deleted vector index for app {app.id}.", fg="green"))
                except Exception as e:
                    click.echo(click.style(f"Failed to delete vector index for app {app.id}.", fg="red"))
                    raise e

                if documents:
                    try:
                        click.echo(
                            click.style(
                                f"Creating vector index with {len(documents)} annotations for app {app.id}.",
                                fg="green",
                            )
                        )
                        vector.create(documents)
                        click.echo(click.style(f"Created vector index for app {app.id}.", fg="green"))
                    except Exception as e:
                        click.echo(click.style(f"Failed to create vector index for app {app.id}.", fg="red"))
                        raise e

                click.echo(f"Successfully migrated app annotation {app.id}.")
                create_count += 1
            except Exception as e:
                click.echo(
                    click.style(f"Error creating app annotation index: {e.__class__.__name__} {str(e)}", fg="red")
                )
                continue

    click.echo(
        click.style(
            f"Migration complete. Created {create_count} app annotation indexes. Skipped {skipped_count} apps.",
            fg="green",
        )
    )

def migrate_knowledge_vector_database():
    """
    Migrate vector database data to the target vector database.
    """
    click.echo(click.style("Starting vector database migration.", fg="green"))
    create_count = 0
    skipped_count = 0
    total_count = 0
    vector_type = dify_config.VECTOR_STORE
    upper_collection_vector_types = {
        VectorType.MILVUS,
        VectorType.PGVECTOR,
        VectorType.VASTBASE,
        VectorType.RELYT,
        VectorType.WEAVIATE,
        VectorType.ORACLE,
        VectorType.ELASTICSEARCH,
        VectorType.OPENGAUSS,
        VectorType.TABLESTORE,
        VectorType.MATRIXONE,
    }
    lower_collection_vector_types = {
        VectorType.ANALYTICDB,
        VectorType.CHROMA,
        VectorType.MYSCALE,
        VectorType.PGVECTO_RS,
        VectorType.TIDB_VECTOR,
        VectorType.OPENSEARCH,
        VectorType.TENCENT,
        VectorType.BAIDU,
        VectorType.VIKINGDB,
        VectorType.UPSTASH,
        VectorType.COUCHBASE,
        VectorType.OCEANBASE,
    }
    page = 1
    while True:
        try:
            stmt = (
                select(Dataset).where(Dataset.indexing_technique == "high_quality").order_by(Dataset.created_at.desc())
            )
            datasets = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False)
        except SQLAlchemyError:
            raise

        page += 1
        for dataset in datasets:
            total_count = total_count + 1
            click.echo(
                f"Processing the {total_count} dataset {dataset.id}. {create_count} created, {skipped_count} skipped."
            )
            try:
                click.echo(f"Creating dataset vector database index: {dataset.id}")
                if dataset.index_struct_dict:
                    if dataset.index_struct_dict["type"] == vector_type:
                        skipped_count = skipped_count + 1
                        continue
                collection_name = ""
                dataset_id = dataset.id
                if vector_type in upper_collection_vector_types:
                    collection_name = Dataset.gen_collection_name_by_id(dataset_id)
                elif vector_type == VectorType.QDRANT:
                    if dataset.collection_binding_id:
                        dataset_collection_binding = (
                            db.session.query(DatasetCollectionBinding)
                            .where(DatasetCollectionBinding.id == dataset.collection_binding_id)
                            .one_or_none()
                        )
                        if dataset_collection_binding:
                            collection_name = dataset_collection_binding.collection_name
                        else:
                            raise ValueError("Dataset Collection Binding not found")
                    else:
                        collection_name = Dataset.gen_collection_name_by_id(dataset_id)
                elif vector_type in lower_collection_vector_types:
                    collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower()
                else:
                    raise ValueError(f"Vector store {vector_type} is not supported.")

                index_struct_dict = {"type": vector_type, "vector_store": {"class_prefix": collection_name}}
                dataset.index_struct = json.dumps(index_struct_dict)
                vector = Vector(dataset)
                click.echo(f"Migrating dataset {dataset.id}.")

                try:
                    vector.delete()
                    click.echo(
                        click.style(f"Deleted vector index {collection_name} for dataset {dataset.id}.", fg="green")
                    )
                except Exception as e:
                    click.echo(
                        click.style(
                            f"Failed to delete vector index {collection_name} for dataset {dataset.id}.", fg="red"
                        )
                    )
                    raise e

                dataset_documents = db.session.scalars(
                    select(DatasetDocument).where(
                        DatasetDocument.dataset_id == dataset.id,
                        DatasetDocument.indexing_status == "completed",
                        DatasetDocument.enabled == True,
                        DatasetDocument.archived == False,
                    )
                ).all()

                documents = []
                segments_count = 0
                for dataset_document in dataset_documents:
                    segments = db.session.scalars(
                        select(DocumentSegment).where(
                            DocumentSegment.document_id == dataset_document.id,
                            DocumentSegment.status == "completed",
                            DocumentSegment.enabled == True,
                        )
                    ).all()
                    for segment in segments:
                        document = Document(
                            page_content=segment.content,
                            metadata={
                                "doc_id": segment.index_node_id,
                                "doc_hash": segment.index_node_hash,
                                "document_id": segment.document_id,
                                "dataset_id": segment.dataset_id,
                            },
                        )
                        documents.append(document)
                        segments_count = segments_count + 1

                if documents:
                    try:
                        click.echo(
                            click.style(
                                f"Creating vector index with {len(documents)} documents of {segments_count}"
                                f" segments for dataset {dataset.id}.",
                                fg="green",
                            )
                        )
                        vector.create(documents)
                        click.echo(click.style(f"Created vector index for dataset {dataset.id}.", fg="green"))
                    except Exception as e:
                        click.echo(click.style(f"Failed to create vector index for dataset {dataset.id}.", fg="red"))
                        raise e

                db.session.add(dataset)
                db.session.commit()
                click.echo(f"Successfully migrated dataset {dataset.id}.")
                create_count += 1
            except Exception as e:
                db.session.rollback()
                click.echo(click.style(f"Error creating dataset index: {e.__class__.__name__} {str(e)}", fg="red"))
                continue

    click.echo(
        click.style(
            f"Migration complete. Created {create_count} dataset indexes. Skipped {skipped_count} datasets.", fg="green"
        )
    )

@click.command("convert-to-agent-apps", help="Convert Agent Assistant to Agent App.")
def convert_to_agent_apps():
    """
    Convert Agent Assistant to Agent App.
    """
    click.echo(click.style("Starting conversion to agent apps.", fg="green"))

    proceeded_app_ids = []
    while True:
        # fetch first 1000 apps
        sql_query = """SELECT a.id AS id FROM apps a
            INNER JOIN app_model_configs am ON a.app_model_config_id=am.id
            WHERE a.mode = 'chat'
            AND am.agent_mode is not null
            AND (
                am.agent_mode like '%"strategy": "function_call"%'
                OR am.agent_mode like '%"strategy": "react"%'
            )
            AND (
                am.agent_mode like '{"enabled": true%'
                OR am.agent_mode like '{"max_iteration": %'
            ) ORDER BY a.created_at DESC LIMIT 1000
        """

        with db.engine.begin() as conn:
            rs = conn.execute(sa.text(sql_query))

            apps = []
            for i in rs:
                app_id = str(i.id)
                if app_id not in proceeded_app_ids:
                    proceeded_app_ids.append(app_id)
                    app = db.session.query(App).where(App.id == app_id).first()
                    if app is not None:
                        apps.append(app)

        if len(apps) == 0:
            break

        for app in apps:
            click.echo(f"Converting app: {app.id}")
            try:
                app.mode = AppMode.AGENT_CHAT
                db.session.commit()

                # update conversation mode to agent
                db.session.query(Conversation).where(Conversation.app_id == app.id).update(
                    {Conversation.mode: AppMode.AGENT_CHAT}
                )
                db.session.commit()
                click.echo(click.style(f"Converted app: {app.id}", fg="green"))
            except Exception as e:
                click.echo(click.style(f"Convert app error: {e.__class__.__name__} {str(e)}", fg="red"))

    click.echo(click.style(f"Conversion complete. Converted {len(proceeded_app_ids)} agent apps.", fg="green"))
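
# Example invocation; iterates in batches of 1000 until no eligible apps remain:
#
#   flask convert-to-agent-apps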

@click.command("add-qdrant-index", help="Add Qdrant index.")
@click.option("--field", default="metadata.doc_id", prompt=False, help="Index field; default is metadata.doc_id.")
def add_qdrant_index(field: str):
    click.echo(click.style("Starting Qdrant index creation.", fg="green"))

    create_count = 0
    try:
        bindings = db.session.query(DatasetCollectionBinding).all()
        if not bindings:
            click.echo(click.style("No dataset collection bindings found.", fg="red"))
            return

        import qdrant_client
        from qdrant_client.http.exceptions import UnexpectedResponse
        from qdrant_client.http.models import PayloadSchemaType

        from core.rag.datasource.vdb.qdrant.qdrant_vector import PathQdrantParams, QdrantConfig

        for binding in bindings:
            if dify_config.QDRANT_URL is None:
                raise ValueError("Qdrant URL is required.")
            qdrant_config = QdrantConfig(
                endpoint=dify_config.QDRANT_URL,
                api_key=dify_config.QDRANT_API_KEY,
                root_path=current_app.root_path,
                timeout=dify_config.QDRANT_CLIENT_TIMEOUT,
                grpc_port=dify_config.QDRANT_GRPC_PORT,
                prefer_grpc=dify_config.QDRANT_GRPC_ENABLED,
            )
            try:
                params = qdrant_config.to_qdrant_params()
                # Check the type before using
                if isinstance(params, PathQdrantParams):
                    # PathQdrantParams case
                    client = qdrant_client.QdrantClient(path=params.path)
                else:
                    # UrlQdrantParams case
                    client = qdrant_client.QdrantClient(
                        url=params.url,
                        api_key=params.api_key,
                        timeout=int(params.timeout),
                        verify=params.verify,
                        grpc_port=params.grpc_port,
                        prefer_grpc=params.prefer_grpc,
                    )
                # create payload index
                client.create_payload_index(binding.collection_name, field, field_schema=PayloadSchemaType.KEYWORD)
                create_count += 1
            except UnexpectedResponse as e:
                # Collection does not exist, so skip it
                if e.status_code == 404:
                    click.echo(click.style(f"Collection not found: {binding.collection_name}.", fg="red"))
                    continue
                # Some other error occurred, so report it
                else:
                    click.echo(
                        click.style(
                            f"Failed to create Qdrant index for collection: {binding.collection_name}.", fg="red"
                        )
                    )
    except Exception:
        click.echo(click.style("Failed to create Qdrant client.", fg="red"))

    click.echo(click.style(f"Index creation complete. Created {create_count} collection indexes.", fg="green"))
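
# Example invocations (the field value below is the default):
#
#   flask add-qdrant-index
#   flask add-qdrant-index --field metadata.doc_id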

@click.command("old-metadata-migration", help="Old metadata migration.")
def old_metadata_migration():
    """
    Old metadata migration.
    """
    click.echo(click.style("Starting old metadata migration.", fg="green"))

    page = 1
    while True:
        try:
            stmt = (
                select(DatasetDocument)
                .where(DatasetDocument.doc_metadata.is_not(None))
                .order_by(DatasetDocument.created_at.desc())
            )
            documents = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False)
        except SQLAlchemyError:
            raise

        if not documents:
            break

        for document in documents:
            if document.doc_metadata:
                doc_metadata = document.doc_metadata
                for key in doc_metadata:
                    for field in BuiltInField:
                        if field.value == key:
                            break
                    else:
                        dataset_metadata = (
                            db.session.query(DatasetMetadata)
                            .where(DatasetMetadata.dataset_id == document.dataset_id, DatasetMetadata.name == key)
                            .first()
                        )
                        if not dataset_metadata:
                            dataset_metadata = DatasetMetadata(
                                tenant_id=document.tenant_id,
                                dataset_id=document.dataset_id,
                                name=key,
                                type="string",
                                created_by=document.created_by,
                            )
                            db.session.add(dataset_metadata)
                            db.session.flush()
                            dataset_metadata_binding = DatasetMetadataBinding(
                                tenant_id=document.tenant_id,
                                dataset_id=document.dataset_id,
                                metadata_id=dataset_metadata.id,
                                document_id=document.id,
                                created_by=document.created_by,
                            )
                            db.session.add(dataset_metadata_binding)
                        else:
                            dataset_metadata_binding = (
                                db.session.query(DatasetMetadataBinding)  # type: ignore
                                .where(
                                    DatasetMetadataBinding.dataset_id == document.dataset_id,
                                    DatasetMetadataBinding.document_id == document.id,
                                    DatasetMetadataBinding.metadata_id == dataset_metadata.id,
                                )
                                .first()
                            )
                            if not dataset_metadata_binding:
                                dataset_metadata_binding = DatasetMetadataBinding(
                                    tenant_id=document.tenant_id,
                                    dataset_id=document.dataset_id,
                                    metadata_id=dataset_metadata.id,
                                    document_id=document.id,
                                    created_by=document.created_by,
                                )
                                db.session.add(dataset_metadata_binding)
                        db.session.commit()
        page += 1
    click.echo(click.style("Old metadata migration completed.", fg="green"))
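
# Example invocation; pages through documents with doc_metadata and backfills
# DatasetMetadata / DatasetMetadataBinding rows for non-built-in keys:
#
#   flask old-metadata-migration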

@click.command("create-tenant", help="Create account and tenant.")
@click.option("--email", prompt=True, help="Tenant account email.")
@click.option("--name", prompt=True, help="Workspace name.")
@click.option("--language", prompt=True, help="Account language, default: en-US.")
def create_tenant(email: str, language: str | None = None, name: str | None = None):
    """
    Create tenant account
    """
    if not email:
        click.echo(click.style("Email is required.", fg="red"))
        return

    # Create account
    email = email.strip()
    if "@" not in email:
        click.echo(click.style("Invalid email address.", fg="red"))
        return

    account_name = email.split("@")[0]

    if language not in languages:
        language = "en-US"

    # Validates name encoding for non-Latin characters.
    name = name.strip().encode("utf-8").decode("utf-8") if name else None

    # generate random password
    new_password = secrets.token_urlsafe(16)

    # register account
    account = RegisterService.register(
        email=email,
        name=account_name,
        password=new_password,
        language=language,
        create_workspace_required=False,
    )
    TenantService.create_owner_tenant_if_not_exist(account, name)

    click.echo(
        click.style(
            f"Account and tenant created.\nAccount: {email}\nPassword: {new_password}",
            fg="green",
        )
    )
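
# Example invocation (illustrative values); a random password is generated and
# printed on success:
#
#   flask create-tenant --email admin@example.com --name "My Workspace" --language en-US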

@click.command("upgrade-db", help="Upgrade the database")
def upgrade_db():
    click.echo("Preparing database migration...")
    lock = redis_client.lock(name="db_upgrade_lock", timeout=60)
    if lock.acquire(blocking=False):
        try:
            click.echo(click.style("Starting database migration.", fg="green"))

            # run db migration
            import flask_migrate

            flask_migrate.upgrade()

            click.echo(click.style("Database migration successful!", fg="green"))
        except Exception:
            logger.exception("Failed to execute database migration")
        finally:
            lock.release()
    else:
        click.echo("Database migration skipped")
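
# Example invocation; the Redis lock ensures only one instance runs the
# migration when multiple containers start at once:
#
#   flask upgrade-db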

@click.command("fix-app-site-missing", help="Fix app related site missing issue.")
def fix_app_site_missing():
    """
    Fix app related site missing issue.
    """
    click.echo(click.style("Starting fix for missing app-related sites.", fg="green"))

    failed_app_ids = []
    while True:
        sql = """select apps.id as id from apps left join sites on sites.app_id=apps.id
        where sites.id is null limit 1000"""
        with db.engine.begin() as conn:
            rs = conn.execute(sa.text(sql))

            processed_count = 0
            for i in rs:
                processed_count += 1
                app_id = str(i.id)

                if app_id in failed_app_ids:
                    continue

                try:
                    app = db.session.query(App).where(App.id == app_id).first()
                    if not app:
                        logger.info("App %s not found", app_id)
                        continue

                    tenant = app.tenant
                    if tenant:
                        accounts = tenant.get_accounts()
                        if not accounts:
                            logger.info("Fix failed for app %s", app.id)
                            continue

                        account = accounts[0]
                        logger.info("Fixing missing site for app %s", app.id)
                        app_was_created.send(app, account=account)
                except Exception:
                    failed_app_ids.append(app_id)
                    click.echo(click.style(f"Failed to fix missing site for app {app_id}", fg="red"))
                    logger.exception("Failed to fix app related site missing issue, app_id: %s", app_id)
                    continue

            if not processed_count:
                break

    click.echo(click.style("Fix for missing app-related sites completed successfully!", fg="green"))
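
# Example invocation; re-sends the app_was_created signal so missing site
# records get recreated:
#
#   flask fix-app-site-missing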

@click.command("migrate-data-for-plugin", help="Migrate data for plugin.")
def migrate_data_for_plugin():
    """
    Migrate data for plugin.
    """
    click.echo(click.style("Starting plugin data migration.", fg="white"))

    PluginDataMigration.migrate()

    click.echo(click.style("Plugin data migration completed.", fg="green"))
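
# Example invocation:
#
#   flask migrate-data-for-plugin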

@click.command("extract-plugins", help="Extract plugins.")
@click.option("--output_file", prompt=True, help="The file to store the extracted plugins.", default="plugins.jsonl")
@click.option("--workers", prompt=True, help="The number of workers to extract plugins.", default=10)
def extract_plugins(output_file: str, workers: int):
    """
    Extract plugins.
    """
    click.echo(click.style("Starting plugin extraction.", fg="white"))

    PluginMigration.extract_plugins(output_file, workers)

    click.echo(click.style("Plugin extraction completed.", fg="green"))

@click.command("extract-unique-identifiers", help="Extract unique identifiers.")
@click.option(
    "--output_file",
    prompt=True,
    help="The file to store the extracted unique identifiers.",
    default="unique_identifiers.json",
)
@click.option(
    "--input_file", prompt=True, help="The file containing the extracted plugins.", default="plugins.jsonl"
)
def extract_unique_plugins(output_file: str, input_file: str):
    """
    Extract unique plugins.
    """
    click.echo(click.style("Starting unique plugin extraction.", fg="white"))

    PluginMigration.extract_unique_plugins_to_file(input_file, output_file)

    click.echo(click.style("Unique plugin extraction completed.", fg="green"))

@click.command("install-plugins", help="Install plugins.")
@click.option(
    "--input_file", prompt=True, help="The file containing the plugins to install.", default="plugins.jsonl"
)
@click.option(
    "--output_file", prompt=True, help="The file to store the installed plugins.", default="installed_plugins.jsonl"
)
@click.option("--workers", prompt=True, help="The number of workers to install plugins.", default=100)
def install_plugins(input_file: str, output_file: str, workers: int):
    """
    Install plugins.
    """
    click.echo(click.style("Starting plugin installation.", fg="white"))

    PluginMigration.install_plugins(input_file, output_file, workers)

    click.echo(click.style("Plugin installation completed.", fg="green"))
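
# The three commands above form a migration pipeline. A sketch of a typical
# run, assuming the default file names:
#
#   flask extract-plugins --output_file plugins.jsonl --workers 10
#   flask extract-unique-identifiers --input_file plugins.jsonl --output_file unique_identifiers.json
#   flask install-plugins --input_file plugins.jsonl --output_file installed_plugins.jsonl --workers 100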

@click.command("clear-free-plan-tenant-expired-logs", help="Clear free plan tenant expired logs.")
@click.option("--days", prompt=True, help="Clear logs older than this many days.", default=30)
@click.option("--batch", prompt=True, help="The batch size for clearing logs.", default=100)
@click.option(
    "--tenant_ids",
    prompt=True,
    multiple=True,
    help="The tenant ids whose expired logs should be cleared.",
)
def clear_free_plan_tenant_expired_logs(days: int, batch: int, tenant_ids: list[str]):
    """
    Clear expired logs for free plan tenants.
    """
    click.echo(click.style("Starting to clear expired logs for free plan tenants.", fg="white"))

    ClearFreePlanTenantExpiredLogs.process(days, batch, tenant_ids)

    click.echo(click.style("Clearing expired logs for free plan tenants completed.", fg="green"))
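
# Example invocation (the tenant ID is illustrative; --tenant_ids may be repeated):
#
#   flask clear-free-plan-tenant-expired-logs --days 30 --batch 100 \
#       --tenant_ids 00000000-0000-0000-0000-000000000000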

@click.option("-f", "--force", is_flag=True, help="Skip user confirmation and force the command to execute.")
@click.command("clear-orphaned-file-records", help="Clear orphaned file records.")
def clear_orphaned_file_records(force: bool):
    """
    Clear orphaned file records in the database.
    """

    # define tables and columns to process
    files_tables = [
        {"table": "upload_files", "id_column": "id", "key_column": "key"},
        {"table": "tool_files", "id_column": "id", "key_column": "file_key"},
    ]
    ids_tables = [
        {"type": "uuid", "table": "message_files", "column": "upload_file_id"},
        {"type": "text", "table": "documents", "column": "data_source_info"},
        {"type": "text", "table": "document_segments", "column": "content"},
        {"type": "text", "table": "messages", "column": "answer"},
        {"type": "text", "table": "workflow_node_executions", "column": "inputs"},
        {"type": "text", "table": "workflow_node_executions", "column": "process_data"},
        {"type": "text", "table": "workflow_node_executions", "column": "outputs"},
        {"type": "text", "table": "conversations", "column": "introduction"},
        {"type": "text", "table": "conversations", "column": "system_instruction"},
        {"type": "text", "table": "accounts", "column": "avatar"},
        {"type": "text", "table": "apps", "column": "icon"},
        {"type": "text", "table": "sites", "column": "icon"},
        {"type": "json", "table": "messages", "column": "inputs"},
        {"type": "json", "table": "messages", "column": "message"},
    ]

    # notify user and ask for confirmation
    click.echo(
        click.style(
            "This command will first find and delete orphaned file records from the message_files table,", fg="yellow"
        )
    )
    click.echo(
        click.style(
            "and then it will find and delete orphaned file records in the following tables:",
            fg="yellow",
        )
    )
    for files_table in files_tables:
        click.echo(click.style(f"- {files_table['table']}", fg="yellow"))
    click.echo(
        click.style("The following tables and columns will be scanned to find orphaned file records:", fg="yellow")
    )
    for ids_table in ids_tables:
        click.echo(click.style(f"- {ids_table['table']} ({ids_table['column']})", fg="yellow"))
    click.echo("")

    click.echo(click.style("!!! USE WITH CAUTION !!!", fg="red"))
    click.echo(
        click.style(
            (
                "Since not all patterns have been fully tested, "
                "please note that this command may delete unintended file records."
            ),
            fg="yellow",
        )
    )
    click.echo(
        click.style("This cannot be undone. Please make sure to back up your database before proceeding.", fg="yellow")
    )
    click.echo(
        click.style(
            (
                "It is also recommended to run this during the maintenance window, "
                "as this may cause high load on your instance."
            ),
            fg="yellow",
        )
    )
    if not force:
        click.confirm("Do you want to proceed?", abort=True)

    # start the cleanup process
    click.echo(click.style("Starting orphaned file records cleanup.", fg="white"))

    # clean up the orphaned records in the message_files table where message_id doesn't exist in messages table
    try:
        click.echo(
            click.style("- Listing message_files records where message_id doesn't exist in messages table", fg="white")
        )
        query = (
            "SELECT mf.id, mf.message_id "
            "FROM message_files mf LEFT JOIN messages m ON mf.message_id = m.id "
            "WHERE m.id IS NULL"
        )
        orphaned_message_files = []
        with db.engine.begin() as conn:
            rs = conn.execute(sa.text(query))
            for i in rs:
                orphaned_message_files.append({"id": str(i[0]), "message_id": str(i[1])})

        if orphaned_message_files:
            click.echo(click.style(f"Found {len(orphaned_message_files)} orphaned message_files records:", fg="white"))
            for record in orphaned_message_files:
                click.echo(click.style(f"  - id: {record['id']}, message_id: {record['message_id']}", fg="black"))

            if not force:
                click.confirm(
                    (
                        f"Do you want to proceed "
                        f"to delete all {len(orphaned_message_files)} orphaned message_files records?"
                    ),
                    abort=True,
                )

            click.echo(click.style("- Deleting orphaned message_files records", fg="white"))
            query = "DELETE FROM message_files WHERE id IN :ids"
            with db.engine.begin() as conn:
                conn.execute(sa.text(query), {"ids": tuple(record["id"] for record in orphaned_message_files)})
            click.echo(
                click.style(f"Removed {len(orphaned_message_files)} orphaned message_files records.", fg="green")
            )
        else:
            click.echo(click.style("No orphaned message_files records found. There is nothing to delete.", fg="green"))
    except Exception as e:
        click.echo(click.style(f"Error deleting orphaned message_files records: {str(e)}", fg="red"))

    # clean up the orphaned records in the rest of the *_files tables
    try:
        # fetch file id and keys from each table
        all_files_in_tables = []
        for files_table in files_tables:
            click.echo(click.style(f"- Listing file records in table {files_table['table']}", fg="white"))
            query = f"SELECT {files_table['id_column']}, {files_table['key_column']} FROM {files_table['table']}"
            with db.engine.begin() as conn:
                rs = conn.execute(sa.text(query))
                for i in rs:
                    all_files_in_tables.append({"table": files_table["table"], "id": str(i[0]), "key": i[1]})
        click.echo(click.style(f"Found {len(all_files_in_tables)} files in tables.", fg="white"))

        # fetch referred table and columns
        guid_regexp = "[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}"
        all_ids_in_tables = []
        for ids_table in ids_tables:
            query = ""
            if ids_table["type"] == "uuid":
                click.echo(
                    click.style(
                        f"- Listing file ids in column {ids_table['column']} in table {ids_table['table']}", fg="white"
                    )
                )
                query = (
                    f"SELECT {ids_table['column']} FROM {ids_table['table']} WHERE {ids_table['column']} IS NOT NULL"
                )
                with db.engine.begin() as conn:
                    rs = conn.execute(sa.text(query))
                    for i in rs:
                        all_ids_in_tables.append({"table": ids_table["table"], "id": str(i[0])})
            elif ids_table["type"] == "text":
                click.echo(
                    click.style(
                        f"- Listing file-id-like strings in column {ids_table['column']} in table {ids_table['table']}",
                        fg="white",
                    )
                )
                query = (
                    f"SELECT regexp_matches({ids_table['column']}, '{guid_regexp}', 'g') AS extracted_id "
                    f"FROM {ids_table['table']}"
                )
                with db.engine.begin() as conn:
                    rs = conn.execute(sa.text(query))
                    for i in rs:
                        for j in i[0]:
                            all_ids_in_tables.append({"table": ids_table["table"], "id": j})
            elif ids_table["type"] == "json":
                click.echo(
                    click.style(
                        (
                            f"- Listing file-id-like JSON string in column {ids_table['column']} "
                            f"in table {ids_table['table']}"
                        ),
                        fg="white",
                    )
                )
                query = (
                    f"SELECT regexp_matches({ids_table['column']}::text, '{guid_regexp}', 'g') AS extracted_id "
                    f"FROM {ids_table['table']}"
                )
                with db.engine.begin() as conn:
                    rs = conn.execute(sa.text(query))
                    for i in rs:
                        for j in i[0]:
                            all_ids_in_tables.append({"table": ids_table["table"], "id": j})
        click.echo(click.style(f"Found {len(all_ids_in_tables)} file ids in tables.", fg="white"))
    except Exception as e:
        click.echo(click.style(f"Error fetching keys: {str(e)}", fg="red"))
        return

    # find orphaned files
    all_files = [file["id"] for file in all_files_in_tables]
    all_ids = [file["id"] for file in all_ids_in_tables]
    orphaned_files = list(set(all_files) - set(all_ids))
    if not orphaned_files:
        click.echo(click.style("No orphaned file records found. There is nothing to delete.", fg="green"))
        return
    click.echo(click.style(f"Found {len(orphaned_files)} orphaned file records.", fg="white"))
    for file in orphaned_files:
        click.echo(click.style(f"- orphaned file id: {file}", fg="black"))
    if not force:
        click.confirm(f"Do you want to proceed to delete all {len(orphaned_files)} orphaned file records?", abort=True)

    # delete orphaned records for each file
    try:
        for files_table in files_tables:
            click.echo(click.style(f"- Deleting orphaned file records in table {files_table['table']}", fg="white"))
            query = f"DELETE FROM {files_table['table']} WHERE {files_table['id_column']} IN :ids"
            with db.engine.begin() as conn:
                conn.execute(sa.text(query), {"ids": tuple(orphaned_files)})
    except Exception as e:
        click.echo(click.style(f"Error deleting orphaned file records: {str(e)}", fg="red"))
        return
    click.echo(click.style(f"Removed {len(orphaned_files)} orphaned file records.", fg="green"))
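
# Example invocations; without -f the command prompts before each destructive step:
#
#   flask clear-orphaned-file-records        # interactive, with confirmations
#   flask clear-orphaned-file-records -f     # skip all confirmations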

@click.option("-f", "--force", is_flag=True, help="Skip user confirmation and force the command to execute.")
@click.command("remove-orphaned-files-on-storage", help="Remove orphaned files on the storage.")
def remove_orphaned_files_on_storage(force: bool):
    """
    Remove orphaned files on the storage.
    """

    # define tables and columns to process
    files_tables = [
        {"table": "upload_files", "key_column": "key"},
        {"table": "tool_files", "key_column": "file_key"},
    ]
    storage_paths = ["image_files", "tools", "upload_files"]

    # notify user and ask for confirmation
    click.echo(click.style("This command will find and remove orphaned files on the storage,", fg="yellow"))
    click.echo(
        click.style("by comparing the files on the storage with the records in the following tables:", fg="yellow")
    )
    for files_table in files_tables:
        click.echo(click.style(f"- {files_table['table']}", fg="yellow"))
    click.echo(click.style("The following paths on the storage will be scanned to find orphaned files:", fg="yellow"))
    for storage_path in storage_paths:
        click.echo(click.style(f"- {storage_path}", fg="yellow"))
    click.echo("")

    click.echo(click.style("!!! USE WITH CAUTION !!!", fg="red"))
    click.echo(
        click.style(
            "Currently, this command will work only for opendal based storage (STORAGE_TYPE=opendal).", fg="yellow"
        )
    )
    click.echo(
        click.style(
            "Since not all patterns have been fully tested, please note that this command may delete unintended files.",
            fg="yellow",
        )
    )
    click.echo(
        click.style("This cannot be undone. Please make sure to back up your storage before proceeding.", fg="yellow")
    )
    click.echo(
        click.style(
            (
                "It is also recommended to run this during the maintenance window, "
                "as this may cause high load on your instance."
            ),
            fg="yellow",
        )
    )
    if not force:
        click.confirm("Do you want to proceed?", abort=True)

    # start the cleanup process
    click.echo(click.style("Starting orphaned files cleanup.", fg="white"))

    # fetch file id and keys from each table
    all_files_in_tables = []
    try:
        for files_table in files_tables:
            click.echo(click.style(f"- Listing files from table {files_table['table']}", fg="white"))
            query = f"SELECT {files_table['key_column']} FROM {files_table['table']}"
            with db.engine.begin() as conn:
                rs = conn.execute(sa.text(query))
                for i in rs:
                    all_files_in_tables.append(str(i[0]))
        click.echo(click.style(f"Found {len(all_files_in_tables)} files in tables.", fg="white"))
    except Exception as e:
        click.echo(click.style(f"Error fetching keys: {str(e)}", fg="red"))

    all_files_on_storage = []
    for storage_path in storage_paths:
        try:
            click.echo(click.style(f"- Scanning files on storage path {storage_path}", fg="white"))
            files = storage.scan(path=storage_path, files=True, directories=False)
            all_files_on_storage.extend(files)
        except FileNotFoundError:
            click.echo(click.style(f" -> Skipping path {storage_path} as it does not exist.", fg="yellow"))
            continue
        except Exception as e:
            click.echo(click.style(f" -> Error scanning files on storage path {storage_path}: {str(e)}", fg="red"))
            continue
    click.echo(click.style(f"Found {len(all_files_on_storage)} files on storage.", fg="white"))

    # find orphaned files
    orphaned_files = list(set(all_files_on_storage) - set(all_files_in_tables))
    if not orphaned_files:
        click.echo(click.style("No orphaned files found. There is nothing to remove.", fg="green"))
        return
    click.echo(click.style(f"Found {len(orphaned_files)} orphaned files.", fg="white"))
    for file in orphaned_files:
        click.echo(click.style(f"- orphaned file: {file}", fg="black"))
    if not force:
        click.confirm(f"Do you want to proceed to remove all {len(orphaned_files)} orphaned files?", abort=True)

    # delete orphaned files
    removed_files = 0
    error_files = 0
    for file in orphaned_files:
        try:
            storage.delete(file)
            removed_files += 1
            click.echo(click.style(f"- Removing orphaned file: {file}", fg="white"))
        except Exception as e:
            error_files += 1
            click.echo(click.style(f"- Error deleting orphaned file {file}: {str(e)}", fg="red"))
            continue

    if error_files == 0:
        click.echo(click.style(f"Removed {removed_files} orphaned files without errors.", fg="green"))
    else:
        click.echo(click.style(f"Removed {removed_files} orphaned files, with {error_files} errors.", fg="yellow"))
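
# Example invocation; note the command currently supports only OpenDAL-based
# storage (STORAGE_TYPE=opendal):
#
#   flask remove-orphaned-files-on-storage -f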

@click.command("setup-system-tool-oauth-client", help="Setup system tool oauth client.")
@click.option("--provider", prompt=True, help="Provider name")
@click.option("--client-params", prompt=True, help="Client Params")
def setup_system_tool_oauth_client(provider, client_params):
    """
    Setup system tool oauth client
    """
    provider_id = ToolProviderID(provider)
    provider_name = provider_id.provider_name
    plugin_id = provider_id.plugin_id

    try:
        # json validate
        click.echo(click.style(f"Validating client params: {client_params}", fg="yellow"))
        client_params_dict = TypeAdapter(dict[str, Any]).validate_json(client_params)
        click.echo(click.style("Client params validated successfully.", fg="green"))

        click.echo(click.style(f"Encrypting client params: {client_params}", fg="yellow"))
        click.echo(click.style(f"Using SECRET_KEY: `{dify_config.SECRET_KEY}`", fg="yellow"))
        oauth_client_params = encrypt_system_oauth_params(client_params_dict)
        click.echo(click.style("Client params encrypted successfully.", fg="green"))
    except Exception as e:
        click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red"))
        return

    deleted_count = (
        db.session.query(ToolOAuthSystemClient)
        .filter_by(
            provider=provider_name,
            plugin_id=plugin_id,
        )
        .delete()
    )
    if deleted_count > 0:
        click.echo(click.style(f"Deleted {deleted_count} existing oauth client params.", fg="yellow"))

    oauth_client = ToolOAuthSystemClient(
        provider=provider_name,
        plugin_id=plugin_id,
        encrypted_oauth_params=oauth_client_params,
    )
    db.session.add(oauth_client)
    db.session.commit()
    click.echo(click.style(f"OAuth client params setup successfully. id: {oauth_client.id}", fg="green"))
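
# Example invocation; the provider identifier and credentials are illustrative,
# and --client-params takes a JSON object:
#
#   flask setup-system-tool-oauth-client --provider langgenius/github/github \
#       --client-params '{"client_id": "xxx", "client_secret": "xxx"}'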

def _find_orphaned_draft_variables(batch_size: int = 1000) -> list[str]:
    """
    Find draft variables that reference non-existent apps.

    Args:
        batch_size: Maximum number of orphaned app IDs to return

    Returns:
        List of app IDs that have draft variables but don't exist in the apps table
    """
    query = """
        SELECT DISTINCT wdv.app_id
        FROM workflow_draft_variables AS wdv
        WHERE NOT EXISTS(
            SELECT 1 FROM apps WHERE apps.id = wdv.app_id
        )
        LIMIT :batch_size
    """
    with db.engine.connect() as conn:
        result = conn.execute(sa.text(query), {"batch_size": batch_size})
        return [row[0] for row in result]


def _count_orphaned_draft_variables() -> dict[str, Any]:
    """
    Count orphaned draft variables by app, including associated file counts.

    Returns:
        Dictionary with statistics about orphaned variables and files
    """
    # Count orphaned variables by app
    variables_query = """
        SELECT
            wdv.app_id,
            COUNT(*) AS variable_count,
            COUNT(wdv.file_id) AS file_count
        FROM workflow_draft_variables AS wdv
        WHERE NOT EXISTS(
            SELECT 1 FROM apps WHERE apps.id = wdv.app_id
        )
        GROUP BY wdv.app_id
        ORDER BY variable_count DESC
    """
    with db.engine.connect() as conn:
        result = conn.execute(sa.text(variables_query))
        orphaned_by_app = {}
        total_files = 0
        for row in result:
            app_id, variable_count, file_count = row
            orphaned_by_app[app_id] = {"variables": variable_count, "files": file_count}
            total_files += file_count

    total_orphaned = sum(app_data["variables"] for app_data in orphaned_by_app.values())
    app_count = len(orphaned_by_app)

    return {
        "total_orphaned_variables": total_orphaned,
        "total_orphaned_files": total_files,
        "orphaned_app_count": app_count,
        "orphaned_by_app": orphaned_by_app,
    }
@click.command()
@click.option("--dry-run", is_flag=True, help="Show what would be deleted without actually deleting")
@click.option("--batch-size", default=1000, help="Number of records to process per batch (default 1000)")
@click.option("--max-apps", default=None, type=int, help="Maximum number of apps to process (default: no limit)")
@click.option("-f", "--force", is_flag=True, help="Skip user confirmation and force the command to execute.")
def cleanup_orphaned_draft_variables(
    dry_run: bool,
    batch_size: int,
    max_apps: int | None,
    force: bool = False,
):
    """
    Clean up orphaned draft variables from the database.

    This command finds and removes draft variables that belong to apps
    that no longer exist in the database.
    """
    logger = logging.getLogger(__name__)

    # Get statistics
    stats = _count_orphaned_draft_variables()

    logger.info("Found %s orphaned draft variables", stats["total_orphaned_variables"])
    logger.info("Found %s associated offload files", stats["total_orphaned_files"])
    logger.info("Across %s non-existent apps", stats["orphaned_app_count"])

    if stats["total_orphaned_variables"] == 0:
        logger.info("No orphaned draft variables found. Exiting.")
        return

    if dry_run:
        logger.info("DRY RUN: Would delete the following:")
        for app_id, data in sorted(stats["orphaned_by_app"].items(), key=lambda x: x[1]["variables"], reverse=True)[
            :10
        ]:  # Show top 10
            logger.info("  App %s: %s variables, %s files", app_id, data["variables"], data["files"])
        if len(stats["orphaned_by_app"]) > 10:
            logger.info("  ... and %s more apps", len(stats["orphaned_by_app"]) - 10)
        return

    # Confirm deletion
    if not force:
        click.confirm(
            f"Are you sure you want to delete {stats['total_orphaned_variables']} "
            f"orphaned draft variables and {stats['total_orphaned_files']} associated files "
            f"from {stats['orphaned_app_count']} apps?",
            abort=True,
        )

    total_deleted = 0
    processed_apps = 0

    while True:
        if max_apps and processed_apps >= max_apps:
            logger.info("Reached maximum app limit (%s). Stopping.", max_apps)
            break

        # Fetch orphaned app IDs in small chunks of 10; each app's variables
        # are then deleted in batches of `batch_size`.
        orphaned_app_ids = _find_orphaned_draft_variables(batch_size=10)
        if not orphaned_app_ids:
            logger.info("No more orphaned draft variables found.")
            break

        for app_id in orphaned_app_ids:
            if max_apps and processed_apps >= max_apps:
                break
            try:
                deleted_count = delete_draft_variables_batch(app_id, batch_size)
                total_deleted += deleted_count
                processed_apps += 1
                logger.info("Deleted %s variables for app %s", deleted_count, app_id)
            except Exception:
                logger.exception("Error processing app %s", app_id)
                continue

    logger.info("Cleanup completed. Total deleted: %s variables across %s apps", total_deleted, processed_apps)
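# Example invocations (assuming this command is registered with the Flask CLI):
#
#     flask cleanup-orphaned-draft-variables --dry-run
#     flask cleanup-orphaned-draft-variables --batch-size 500 --max-apps 20 -f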
@click.command("setup-datasource-oauth-client", help="Setup datasource oauth client.")
@click.option("--provider", prompt=True, help="Provider name")
@click.option("--client-params", prompt=True, help="Client Params")
def setup_datasource_oauth_client(provider, client_params):
    """
    Setup datasource oauth client
    """
    provider_id = DatasourceProviderID(provider)
    provider_name = provider_id.provider_name
    plugin_id = provider_id.plugin_id
    try:
        # validate that the client params are well-formed JSON
        click.echo(click.style(f"Validating client params: {client_params}", fg="yellow"))
        client_params_dict = TypeAdapter(dict[str, Any]).validate_json(client_params)
        click.echo(click.style("Client params validated successfully.", fg="green"))
    except Exception as e:
        click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red"))
        return

    click.echo(click.style(f"Ready to delete existing oauth client params: {provider_name}", fg="yellow"))
    deleted_count = (
        db.session.query(DatasourceOauthParamConfig)
        .filter_by(
            provider=provider_name,
            plugin_id=plugin_id,
        )
        .delete()
    )
    if deleted_count > 0:
        click.echo(click.style(f"Deleted {deleted_count} existing oauth client params.", fg="yellow"))

    click.echo(click.style(f"Ready to setup datasource oauth client: {provider_name}", fg="yellow"))
    oauth_client = DatasourceOauthParamConfig(
        provider=provider_name,
        plugin_id=plugin_id,
        system_credentials=client_params_dict,
    )
    db.session.add(oauth_client)
    db.session.commit()
    click.echo(click.style(f"provider: {provider_name}", fg="green"))
    click.echo(click.style(f"plugin_id: {plugin_id}", fg="green"))
    click.echo(click.style(f"params: {json.dumps(client_params_dict, indent=2, ensure_ascii=False)}", fg="green"))
    click.echo(click.style(f"Datasource oauth client setup successfully. id: {oauth_client.id}", fg="green"))
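# Example invocation (assuming Flask CLI registration; the provider value and
# client params below are hypothetical). --client-params must be a JSON object:
#
#     flask setup-datasource-oauth-client \
#         --provider langgenius/notion_datasource/notion_datasource \
#         --client-params '{"client_id": "...", "client_secret": "..."}'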
@click.command("transform-datasource-credentials", help="Transform datasource credentials.")
def transform_datasource_credentials():
    """
    Transform datasource credentials
    """
    try:
        installer_manager = PluginInstaller()
        plugin_migration = PluginMigration()
        notion_plugin_id = "langgenius/notion_datasource"
        firecrawl_plugin_id = "langgenius/firecrawl_datasource"
        jina_plugin_id = "langgenius/jina_datasource"
        notion_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(notion_plugin_id)  # pyright: ignore[reportPrivateUsage]
        firecrawl_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(firecrawl_plugin_id)  # pyright: ignore[reportPrivateUsage]
        jina_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(jina_plugin_id)  # pyright: ignore[reportPrivateUsage]
        oauth_credential_type = CredentialType.OAUTH2
        api_key_credential_type = CredentialType.API_KEY

        # handle notion credentials
        deal_notion_count = 0
        notion_credentials = db.session.query(DataSourceOauthBinding).filter_by(provider="notion").all()
        if notion_credentials:
            # group credentials by tenant
            notion_credentials_tenant_mapping: dict[str, list[DataSourceOauthBinding]] = {}
            for notion_credential in notion_credentials:
                tenant_id = notion_credential.tenant_id
                if tenant_id not in notion_credentials_tenant_mapping:
                    notion_credentials_tenant_mapping[tenant_id] = []
                notion_credentials_tenant_mapping[tenant_id].append(notion_credential)

            for tenant_id, notion_tenant_credentials in notion_credentials_tenant_mapping.items():
                tenant = db.session.query(Tenant).filter_by(id=tenant_id).first()
                if not tenant:
                    continue
                try:
                    # check whether the notion plugin is installed
                    installed_plugins = installer_manager.list_plugins(tenant_id)
                    installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
                    if notion_plugin_id not in installed_plugins_ids:
                        if notion_plugin_unique_identifier:
                            # install the notion plugin
                            PluginService.install_from_marketplace_pkg(tenant_id, [notion_plugin_unique_identifier])
                    auth_count = 0
                    for notion_tenant_credential in notion_tenant_credentials:
                        auth_count += 1
                        # get the credential's oauth params
                        access_token = notion_tenant_credential.access_token
                        # notion workspace info
                        notion_info = notion_tenant_credential.source_info
                        workspace_id = notion_info.get("workspace_id")
                        workspace_name = notion_info.get("workspace_name")
                        workspace_icon = notion_info.get("workspace_icon")
                        new_credentials = {
                            "integration_secret": encrypter.encrypt_token(tenant_id, access_token),
                            "workspace_id": workspace_id,
                            "workspace_name": workspace_name,
                            "workspace_icon": workspace_icon,
                        }
                        datasource_provider = DatasourceProvider(
                            provider="notion_datasource",
                            tenant_id=tenant_id,
                            plugin_id=notion_plugin_id,
                            auth_type=oauth_credential_type.value,
                            encrypted_credentials=new_credentials,
                            name=f"Auth {auth_count}",
                            avatar_url=workspace_icon or "default",
                            is_default=False,
                        )
                        db.session.add(datasource_provider)
                        deal_notion_count += 1
                except Exception as e:
                    click.echo(
                        click.style(
                            f"Error transforming notion credentials: {str(e)}, tenant_id: {tenant_id}", fg="red"
                        )
                    )
                    continue
            db.session.commit()

        # handle firecrawl credentials
        deal_firecrawl_count = 0
        firecrawl_credentials = db.session.query(DataSourceApiKeyAuthBinding).filter_by(provider="firecrawl").all()
        if firecrawl_credentials:
            firecrawl_credentials_tenant_mapping: dict[str, list[DataSourceApiKeyAuthBinding]] = {}
            for firecrawl_credential in firecrawl_credentials:
                tenant_id = firecrawl_credential.tenant_id
                if tenant_id not in firecrawl_credentials_tenant_mapping:
                    firecrawl_credentials_tenant_mapping[tenant_id] = []
                firecrawl_credentials_tenant_mapping[tenant_id].append(firecrawl_credential)

            for tenant_id, firecrawl_tenant_credentials in firecrawl_credentials_tenant_mapping.items():
                tenant = db.session.query(Tenant).filter_by(id=tenant_id).first()
                if not tenant:
                    continue
                try:
                    # check whether the firecrawl plugin is installed
                    installed_plugins = installer_manager.list_plugins(tenant_id)
                    installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
                    if firecrawl_plugin_id not in installed_plugins_ids:
                        if firecrawl_plugin_unique_identifier:
                            # install the firecrawl plugin
                            PluginService.install_from_marketplace_pkg(tenant_id, [firecrawl_plugin_unique_identifier])
                    auth_count = 0
                    for firecrawl_tenant_credential in firecrawl_tenant_credentials:
                        auth_count += 1
                        # get the credential's api key
                        credentials_json = json.loads(firecrawl_tenant_credential.credentials)
                        api_key = credentials_json.get("config", {}).get("api_key")
                        base_url = credentials_json.get("config", {}).get("base_url")
                        # NOTE: the key is stored as provided; unlike the notion path
                        # above, it is not re-encrypted here.
                        new_credentials = {
                            "firecrawl_api_key": api_key,
                            "base_url": base_url,
                        }
                        datasource_provider = DatasourceProvider(
                            provider="firecrawl",
                            tenant_id=tenant_id,
                            plugin_id=firecrawl_plugin_id,
                            auth_type=api_key_credential_type.value,
                            encrypted_credentials=new_credentials,
                            name=f"Auth {auth_count}",
                            avatar_url="default",
                            is_default=False,
                        )
                        db.session.add(datasource_provider)
                        deal_firecrawl_count += 1
                except Exception as e:
                    click.echo(
                        click.style(
                            f"Error transforming firecrawl credentials: {str(e)}, tenant_id: {tenant_id}", fg="red"
                        )
                    )
                    continue
            db.session.commit()

        # handle jina credentials
        deal_jina_count = 0
        jina_credentials = db.session.query(DataSourceApiKeyAuthBinding).filter_by(provider="jinareader").all()
        if jina_credentials:
            jina_credentials_tenant_mapping: dict[str, list[DataSourceApiKeyAuthBinding]] = {}
            for jina_credential in jina_credentials:
                tenant_id = jina_credential.tenant_id
                if tenant_id not in jina_credentials_tenant_mapping:
                    jina_credentials_tenant_mapping[tenant_id] = []
                jina_credentials_tenant_mapping[tenant_id].append(jina_credential)

            for tenant_id, jina_tenant_credentials in jina_credentials_tenant_mapping.items():
                tenant = db.session.query(Tenant).filter_by(id=tenant_id).first()
                if not tenant:
                    continue
                try:
                    # check whether the jina plugin is installed
                    installed_plugins = installer_manager.list_plugins(tenant_id)
                    installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
                    if jina_plugin_id not in installed_plugins_ids:
                        if jina_plugin_unique_identifier:
                            # install the jina plugin
                            logger.debug("Installing Jina plugin %s", jina_plugin_unique_identifier)
                            PluginService.install_from_marketplace_pkg(tenant_id, [jina_plugin_unique_identifier])
                    auth_count = 0
                    for jina_tenant_credential in jina_tenant_credentials:
                        auth_count += 1
                        # get the credential's api key
                        credentials_json = json.loads(jina_tenant_credential.credentials)
                        api_key = credentials_json.get("config", {}).get("api_key")
                        # NOTE: stored as provided, without re-encryption
                        new_credentials = {
                            "integration_secret": api_key,
                        }
                        datasource_provider = DatasourceProvider(
                            provider="jina",
                            tenant_id=tenant_id,
                            plugin_id=jina_plugin_id,
                            auth_type=api_key_credential_type.value,
                            encrypted_credentials=new_credentials,
                            name=f"Auth {auth_count}",
                            avatar_url="default",
                            is_default=False,
                        )
                        db.session.add(datasource_provider)
                        deal_jina_count += 1
                except Exception as e:
                    click.echo(
                        click.style(f"Error transforming jina credentials: {str(e)}, tenant_id: {tenant_id}", fg="red")
                    )
                    continue
            db.session.commit()
    except Exception as e:
        click.echo(click.style(f"Error transforming datasource credentials: {str(e)}", fg="red"))
        return

    click.echo(click.style(f"Transformed notion credentials successfully. deal_notion_count: {deal_notion_count}", fg="green"))
    click.echo(
        click.style(f"Transformed firecrawl credentials successfully. deal_firecrawl_count: {deal_firecrawl_count}", fg="green")
    )
    click.echo(click.style(f"Transformed jina credentials successfully. deal_jina_count: {deal_jina_count}", fg="green"))
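# Example invocation (assuming Flask CLI registration). Note that the command
# does not check for already-transformed credentials, so it is intended as a
# one-off migration:
#
#     flask transform-datasource-credentials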
@click.command("install-rag-pipeline-plugins", help="Install rag pipeline plugins.")
@click.option(
    "--input_file", prompt=True, help="The file containing the extracted unique identifiers.", default="plugins.jsonl"
)
@click.option(
    "--output_file", prompt=True, help="The file to store the installed plugins.", default="installed_plugins.jsonl"
)
@click.option("--workers", prompt=True, help="The number of workers to install plugins.", default=100)
def install_rag_pipeline_plugins(input_file, output_file, workers):
    """
    Install rag pipeline plugins
    """
    click.echo(click.style("Installing rag pipeline plugins", fg="yellow"))
    plugin_migration = PluginMigration()
    plugin_migration.install_rag_pipeline_plugins(
        input_file,
        output_file,
        workers,
    )
    click.echo(click.style("Installed rag pipeline plugins successfully", fg="green"))
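# Example invocation (assuming Flask CLI registration; the file names are the
# declared defaults):
#
#     flask install-rag-pipeline-plugins \
#         --input_file plugins.jsonl \
#         --output_file installed_plugins.jsonl \
#         --workers 100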
@click.command(
    "migrate-oss",
    help="Migrate files from Local or OpenDAL source to a cloud OSS storage (destination must NOT be local/opendal).",
)
@click.option(
    "--path",
    "paths",
    multiple=True,
    help="Storage path prefixes to migrate (repeatable). Defaults: privkeys, upload_files, image_files,"
    " tools, website_files, keyword_files, ops_trace",
)
@click.option(
    "--source",
    type=click.Choice(["local", "opendal"], case_sensitive=False),
    default="opendal",
    show_default=True,
    help="Source storage type to read from",
)
@click.option("--overwrite", is_flag=True, default=False, help="Overwrite destination if file already exists")
@click.option("--dry-run", is_flag=True, default=False, help="Show what would be migrated without uploading")
@click.option("-f", "--force", is_flag=True, help="Skip confirmation and run without prompts")
@click.option(
    "--update-db/--no-update-db",
    default=True,
    help="Update upload_files.storage_type from source type to current storage after migration",
)
def migrate_oss(
    paths: tuple[str, ...],
    source: str,
    overwrite: bool,
    dry_run: bool,
    force: bool,
    update_db: bool,
):
    """
    Copy all files under selected prefixes from a source storage
    (Local filesystem or OpenDAL-backed) into the currently configured
    destination storage backend, then optionally update DB records.

    Expected usage: set STORAGE_TYPE (and its credentials) to your target backend.
    """
    # Ensure the target storage is not local/opendal
    if dify_config.STORAGE_TYPE in (StorageType.LOCAL, StorageType.OPENDAL):
        click.echo(
            click.style(
                "Target STORAGE_TYPE must be a cloud OSS (not 'local' or 'opendal').\n"
                "Please set STORAGE_TYPE to one of: s3, aliyun-oss, azure-blob, google-storage, tencent-cos, \n"
                "volcengine-tos, supabase, oci-storage, huawei-obs, baidu-obs, clickzetta-volume.",
                fg="red",
            )
        )
        return

    # Default paths if none specified
    default_paths = ("privkeys", "upload_files", "image_files", "tools", "website_files", "keyword_files", "ops_trace")
    path_list = list(paths) if paths else list(default_paths)

    is_source_local = source.lower() == "local"

    click.echo(click.style("Preparing migration to target storage.", fg="yellow"))
    click.echo(click.style(f"Target storage type: {dify_config.STORAGE_TYPE}", fg="white"))
    if is_source_local:
        src_root = dify_config.STORAGE_LOCAL_PATH
        click.echo(click.style(f"Source: local fs, root: {src_root}", fg="white"))
    else:
        click.echo(click.style(f"Source: opendal scheme={dify_config.OPENDAL_SCHEME}", fg="white"))
    click.echo(click.style(f"Paths to migrate: {', '.join(path_list)}", fg="white"))
    click.echo("")

    if not force:
        click.confirm("Proceed with migration?", abort=True)

    # Instantiate the source storage
    try:
        if is_source_local:
            src_root = dify_config.STORAGE_LOCAL_PATH
            source_storage = OpenDALStorage(scheme="fs", root=src_root)
        else:
            source_storage = OpenDALStorage(scheme=dify_config.OPENDAL_SCHEME)
    except Exception as e:
        click.echo(click.style(f"Failed to initialize source storage: {str(e)}", fg="red"))
        return

    total_files = 0
    copied_files = 0
    skipped_files = 0
    errored_files = 0
    copied_upload_file_keys: list[str] = []

    for prefix in path_list:
        click.echo(click.style(f"Scanning source path: {prefix}", fg="white"))
        try:
            keys = source_storage.scan(path=prefix, files=True, directories=False)
        except FileNotFoundError:
            click.echo(click.style(f"  -> Skipping missing path: {prefix}", fg="yellow"))
            continue
        except NotImplementedError:
            click.echo(click.style("  -> Source storage does not support scanning.", fg="red"))
            return
        except Exception as e:
            click.echo(click.style(f"  -> Error scanning '{prefix}': {str(e)}", fg="red"))
            continue

        click.echo(click.style(f"Found {len(keys)} files under {prefix}", fg="white"))
        for key in keys:
            total_files += 1

            # Check destination existence
            if not overwrite:
                try:
                    if storage.exists(key):
                        skipped_files += 1
                        continue
                except Exception as e:
                    # Existence-check failures should not block the migration attempt,
                    # but they are surfaced to the user as a warning for visibility.
                    click.echo(
                        click.style(
                            f"  -> Warning: failed target existence check for {key}: {str(e)}",
                            fg="yellow",
                        )
                    )

            if dry_run:
                copied_files += 1
                continue

            # Read from the source and write to the destination
            try:
                data = source_storage.load_once(key)
            except FileNotFoundError:
                errored_files += 1
                click.echo(click.style(f"  -> Missing on source: {key}", fg="yellow"))
                continue
            except Exception as e:
                errored_files += 1
                click.echo(click.style(f"  -> Error reading {key}: {str(e)}", fg="red"))
                continue

            try:
                storage.save(key, data)
                copied_files += 1
                if prefix == "upload_files":
                    copied_upload_file_keys.append(key)
            except Exception as e:
                errored_files += 1
                click.echo(click.style(f"  -> Error writing {key} to target: {str(e)}", fg="red"))
                continue

    click.echo("")
    click.echo(click.style("Migration summary:", fg="yellow"))
    click.echo(click.style(f"  Total: {total_files}", fg="white"))
    click.echo(click.style(f"  Copied: {copied_files}", fg="green"))
    click.echo(click.style(f"  Skipped: {skipped_files}", fg="white"))
    if errored_files:
        click.echo(click.style(f"  Errors: {errored_files}", fg="red"))

    if dry_run:
        click.echo(click.style("Dry-run complete. No changes were made.", fg="green"))
        return

    if errored_files:
        click.echo(
            click.style(
                "Some files failed to migrate. Review errors above before updating DB records.",
                fg="yellow",
            )
        )
        if update_db and not force:
            if not click.confirm("Proceed to update DB storage_type despite errors?", default=False):
                update_db = False

    # Optionally update DB records for upload_files.storage_type (only for successfully copied upload_files)
    if update_db:
        if not copied_upload_file_keys:
            click.echo(click.style("No upload_files copied. Skipping DB storage_type update.", fg="yellow"))
        else:
            try:
                source_storage_type = StorageType.LOCAL if is_source_local else StorageType.OPENDAL
                updated = (
                    db.session.query(UploadFile)
                    .where(
                        UploadFile.storage_type == source_storage_type,
                        UploadFile.key.in_(copied_upload_file_keys),
                    )
                    .update({UploadFile.storage_type: dify_config.STORAGE_TYPE}, synchronize_session=False)
                )
                db.session.commit()
                click.echo(click.style(f"Updated storage_type for {updated} upload_files records.", fg="green"))
            except Exception as e:
                db.session.rollback()
                click.echo(click.style(f"Failed to update DB storage_type: {str(e)}", fg="red"))
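# Example invocations (assuming Flask CLI registration and STORAGE_TYPE set to
# a cloud backend such as s3):
#
#     flask migrate-oss --dry-run
#     flask migrate-oss --source local --path upload_files --path tools -f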