您最多选择25个主题 主题必须以字母或数字开头,可以包含连字符 (-),并且长度不得超过35个字符

commands.py 60KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434
  1. import base64
  2. import json
  3. import logging
  4. import secrets
  5. from typing import Any, Optional
  6. import click
  7. import sqlalchemy as sa
  8. from flask import current_app
  9. from pydantic import TypeAdapter
  10. from sqlalchemy import select
  11. from werkzeug.exceptions import NotFound
  12. from configs import dify_config
  13. from constants.languages import languages
  14. from core.helper import encrypter
  15. from core.plugin.entities.plugin import DatasourceProviderID, PluginInstallationSource, ToolProviderID
  16. from core.plugin.impl.plugin import PluginInstaller
  17. from core.rag.datasource.vdb.vector_factory import Vector
  18. from core.rag.datasource.vdb.vector_type import VectorType
  19. from core.rag.index_processor.constant.built_in_field import BuiltInField
  20. from core.rag.models.document import Document
  21. from core.tools.entities.tool_entities import CredentialType
  22. from core.tools.utils.system_oauth_encryption import encrypt_system_oauth_params
  23. from events.app_event import app_was_created
  24. from extensions.ext_database import db
  25. from extensions.ext_redis import redis_client
  26. from extensions.ext_storage import storage
  27. from libs.helper import email as email_validate
  28. from libs.password import hash_password, password_pattern, valid_password
  29. from libs.rsa import generate_key_pair
  30. from models import Tenant
  31. from models.dataset import Dataset, DatasetCollectionBinding, DatasetMetadata, DatasetMetadataBinding, DocumentSegment
  32. from models.dataset import Document as DatasetDocument
  33. from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
  34. from models.oauth import DatasourceOauthParamConfig, DatasourceProvider
  35. from models.provider import Provider, ProviderModel
  36. from models.source import DataSourceApiKeyAuthBinding, DataSourceOauthBinding
  37. from models.tools import ToolOAuthSystemClient
  38. from services.account_service import AccountService, RegisterService, TenantService
  39. from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpiredLogs
  40. from services.plugin.data_migration import PluginDataMigration
  41. from services.plugin.plugin_migration import PluginMigration
  42. @click.command("reset-password", help="Reset the account password.")
  43. @click.option("--email", prompt=True, help="Account email to reset password for")
  44. @click.option("--new-password", prompt=True, help="New password")
  45. @click.option("--password-confirm", prompt=True, help="Confirm new password")
  46. def reset_password(email, new_password, password_confirm):
  47. """
  48. Reset password of owner account
  49. Only available in SELF_HOSTED mode
  50. """
  51. if str(new_password).strip() != str(password_confirm).strip():
  52. click.echo(click.style("Passwords do not match.", fg="red"))
  53. return
  54. account = db.session.query(Account).where(Account.email == email).one_or_none()
  55. if not account:
  56. click.echo(click.style(f"Account not found for email: {email}", fg="red"))
  57. return
  58. try:
  59. valid_password(new_password)
  60. except:
  61. click.echo(click.style(f"Invalid password. Must match {password_pattern}", fg="red"))
  62. return
  63. # generate password salt
  64. salt = secrets.token_bytes(16)
  65. base64_salt = base64.b64encode(salt).decode()
  66. # encrypt password with salt
  67. password_hashed = hash_password(new_password, salt)
  68. base64_password_hashed = base64.b64encode(password_hashed).decode()
  69. account.password = base64_password_hashed
  70. account.password_salt = base64_salt
  71. db.session.commit()
  72. AccountService.reset_login_error_rate_limit(email)
  73. click.echo(click.style("Password reset successfully.", fg="green"))
  74. @click.command("reset-email", help="Reset the account email.")
  75. @click.option("--email", prompt=True, help="Current account email")
  76. @click.option("--new-email", prompt=True, help="New email")
  77. @click.option("--email-confirm", prompt=True, help="Confirm new email")
  78. def reset_email(email, new_email, email_confirm):
  79. """
  80. Replace account email
  81. :return:
  82. """
  83. if str(new_email).strip() != str(email_confirm).strip():
  84. click.echo(click.style("New emails do not match.", fg="red"))
  85. return
  86. account = db.session.query(Account).where(Account.email == email).one_or_none()
  87. if not account:
  88. click.echo(click.style(f"Account not found for email: {email}", fg="red"))
  89. return
  90. try:
  91. email_validate(new_email)
  92. except:
  93. click.echo(click.style(f"Invalid email: {new_email}", fg="red"))
  94. return
  95. account.email = new_email
  96. db.session.commit()
  97. click.echo(click.style("Email updated successfully.", fg="green"))
  98. @click.command(
  99. "reset-encrypt-key-pair",
  100. help="Reset the asymmetric key pair of workspace for encrypt LLM credentials. "
  101. "After the reset, all LLM credentials will become invalid, "
  102. "requiring re-entry."
  103. "Only support SELF_HOSTED mode.",
  104. )
  105. @click.confirmation_option(
  106. prompt=click.style(
  107. "Are you sure you want to reset encrypt key pair? This operation cannot be rolled back!", fg="red"
  108. )
  109. )
  110. def reset_encrypt_key_pair():
  111. """
  112. Reset the encrypted key pair of workspace for encrypt LLM credentials.
  113. After the reset, all LLM credentials will become invalid, requiring re-entry.
  114. Only support SELF_HOSTED mode.
  115. """
  116. if dify_config.EDITION != "SELF_HOSTED":
  117. click.echo(click.style("This command is only for SELF_HOSTED installations.", fg="red"))
  118. return
  119. tenants = db.session.query(Tenant).all()
  120. for tenant in tenants:
  121. if not tenant:
  122. click.echo(click.style("No workspaces found. Run /install first.", fg="red"))
  123. return
  124. tenant.encrypt_public_key = generate_key_pair(tenant.id)
  125. db.session.query(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
  126. db.session.query(ProviderModel).where(ProviderModel.tenant_id == tenant.id).delete()
  127. db.session.commit()
  128. click.echo(
  129. click.style(
  130. f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.",
  131. fg="green",
  132. )
  133. )
  134. @click.command("vdb-migrate", help="Migrate vector db.")
  135. @click.option("--scope", default="all", prompt=False, help="The scope of vector database to migrate, Default is All.")
  136. def vdb_migrate(scope: str):
  137. if scope in {"knowledge", "all"}:
  138. migrate_knowledge_vector_database()
  139. if scope in {"annotation", "all"}:
  140. migrate_annotation_vector_database()
def migrate_annotation_vector_database():
    """
    Migrate app annotation data to the currently configured vector database.

    Walks all "normal" apps page by page; for each app with an annotation
    setting and collection binding, rebuilds its annotation vector index
    (delete then recreate). Per-app failures are logged and skipped so one
    bad app does not abort the whole migration.
    """
    click.echo(click.style("Starting annotation data migration.", fg="green"))
    create_count = 0
    skipped_count = 0
    total_count = 0
    page = 1
    while True:
        try:
            # get apps info — manual limit/offset pagination, newest first
            per_page = 50
            apps = (
                db.session.query(App)
                .where(App.status == "normal")
                .order_by(App.created_at.desc())
                .limit(per_page)
                .offset((page - 1) * per_page)
                .all()
            )
            if not apps:
                break
        except NotFound:
            break

        page += 1
        for app in apps:
            total_count = total_count + 1
            click.echo(
                f"Processing the {total_count} app {app.id}. " + f"{create_count} created, {skipped_count} skipped."
            )
            try:
                click.echo(f"Creating app annotation index: {app.id}")
                app_annotation_setting = (
                    db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first()
                )

                # Apps without an annotation setting have annotation reply
                # disabled — nothing to migrate.
                if not app_annotation_setting:
                    skipped_count = skipped_count + 1
                    click.echo(f"App annotation setting disabled: {app.id}")
                    continue
                # get dataset_collection_binding info (embedding model + collection)
                dataset_collection_binding = (
                    db.session.query(DatasetCollectionBinding)
                    .where(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
                    .first()
                )
                if not dataset_collection_binding:
                    click.echo(f"App annotation collection binding not found: {app.id}")
                    continue
                annotations = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app.id).all()
                # Transient Dataset (not persisted) — only used to tell the
                # Vector factory which collection/embedding model to target.
                dataset = Dataset(
                    id=app.id,
                    tenant_id=app.tenant_id,
                    indexing_technique="high_quality",
                    embedding_model_provider=dataset_collection_binding.provider_name,
                    embedding_model=dataset_collection_binding.model_name,
                    collection_binding_id=dataset_collection_binding.id,
                )

                documents = []
                if annotations:
                    for annotation in annotations:
                        # The annotation question is the searchable content;
                        # ids are kept in metadata for later filtering.
                        document = Document(
                            page_content=annotation.question,
                            metadata={"annotation_id": annotation.id, "app_id": app.id, "doc_id": annotation.id},
                        )
                        documents.append(document)

                vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
                click.echo(f"Migrating annotations for app: {app.id}.")

                # Drop the old index first; a delete failure aborts this app.
                try:
                    vector.delete()
                    click.echo(click.style(f"Deleted vector index for app {app.id}.", fg="green"))
                except Exception as e:
                    click.echo(click.style(f"Failed to delete vector index for app {app.id}.", fg="red"))
                    raise e

                if documents:
                    try:
                        click.echo(
                            click.style(
                                f"Creating vector index with {len(documents)} annotations for app {app.id}.",
                                fg="green",
                            )
                        )
                        vector.create(documents)
                        click.echo(click.style(f"Created vector index for app {app.id}.", fg="green"))
                    except Exception as e:
                        click.echo(click.style(f"Failed to created vector index for app {app.id}.", fg="red"))
                        raise e

                click.echo(f"Successfully migrated app annotation {app.id}.")
                create_count += 1
            except Exception as e:
                # Best-effort migration: report and move on to the next app.
                click.echo(
                    click.style(f"Error creating app annotation index: {e.__class__.__name__} {str(e)}", fg="red")
                )
                continue

    click.echo(
        click.style(
            f"Migration complete. Created {create_count} app annotation indexes. Skipped {skipped_count} apps.",
            fg="green",
        )
    )
def migrate_knowledge_vector_database():
    """
    Migrate every high-quality dataset's index to the configured vector store.

    For each dataset whose index_struct type differs from the current
    VECTOR_STORE setting, computes the target collection name (casing rules
    vary per store), deletes the old index, and re-creates it from all
    completed, enabled document segments. Per-dataset failures roll back
    and are skipped.
    """
    click.echo(click.style("Starting vector database migration.", fg="green"))
    create_count = 0
    skipped_count = 0
    total_count = 0
    vector_type = dify_config.VECTOR_STORE
    # Stores whose collection names keep the generated (upper-case) form.
    upper_collection_vector_types = {
        VectorType.MILVUS,
        VectorType.PGVECTOR,
        VectorType.VASTBASE,
        VectorType.RELYT,
        VectorType.WEAVIATE,
        VectorType.ORACLE,
        VectorType.ELASTICSEARCH,
        VectorType.OPENGAUSS,
        VectorType.TABLESTORE,
        VectorType.MATRIXONE,
    }
    # Stores that require a lower-cased collection name.
    lower_collection_vector_types = {
        VectorType.ANALYTICDB,
        VectorType.CHROMA,
        VectorType.MYSCALE,
        VectorType.PGVECTO_RS,
        VectorType.TIDB_VECTOR,
        VectorType.OPENSEARCH,
        VectorType.TENCENT,
        VectorType.BAIDU,
        VectorType.VIKINGDB,
        VectorType.UPSTASH,
        VectorType.COUCHBASE,
        VectorType.OCEANBASE,
    }
    page = 1
    while True:
        try:
            stmt = (
                select(Dataset).where(Dataset.indexing_technique == "high_quality").order_by(Dataset.created_at.desc())
            )
            datasets = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False)
        except NotFound:
            break

        page += 1
        for dataset in datasets:
            total_count = total_count + 1
            click.echo(
                f"Processing the {total_count} dataset {dataset.id}. {create_count} created, {skipped_count} skipped."
            )
            try:
                click.echo(f"Creating dataset vector database index: {dataset.id}")
                # Already on the target store — nothing to do.
                if dataset.index_struct_dict:
                    if dataset.index_struct_dict["type"] == vector_type:
                        skipped_count = skipped_count + 1
                        continue
                collection_name = ""
                dataset_id = dataset.id
                if vector_type in upper_collection_vector_types:
                    collection_name = Dataset.gen_collection_name_by_id(dataset_id)
                elif vector_type == VectorType.QDRANT:
                    # Qdrant may share a collection via a collection binding;
                    # fall back to a per-dataset collection otherwise.
                    if dataset.collection_binding_id:
                        dataset_collection_binding = (
                            db.session.query(DatasetCollectionBinding)
                            .where(DatasetCollectionBinding.id == dataset.collection_binding_id)
                            .one_or_none()
                        )
                        if dataset_collection_binding:
                            collection_name = dataset_collection_binding.collection_name
                        else:
                            raise ValueError("Dataset Collection Binding not found")
                    else:
                        collection_name = Dataset.gen_collection_name_by_id(dataset_id)
                elif vector_type in lower_collection_vector_types:
                    collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower()
                else:
                    raise ValueError(f"Vector store {vector_type} is not supported.")

                # Point the dataset at the new store before re-indexing so the
                # Vector factory targets the right backend/collection.
                index_struct_dict = {"type": vector_type, "vector_store": {"class_prefix": collection_name}}
                dataset.index_struct = json.dumps(index_struct_dict)
                vector = Vector(dataset)
                click.echo(f"Migrating dataset {dataset.id}.")

                try:
                    vector.delete()
                    click.echo(
                        click.style(f"Deleted vector index {collection_name} for dataset {dataset.id}.", fg="green")
                    )
                except Exception as e:
                    click.echo(
                        click.style(
                            f"Failed to delete vector index {collection_name} for dataset {dataset.id}.", fg="red"
                        )
                    )
                    raise e

                # Re-index only fully indexed, active, non-archived documents.
                dataset_documents = (
                    db.session.query(DatasetDocument)
                    .where(
                        DatasetDocument.dataset_id == dataset.id,
                        DatasetDocument.indexing_status == "completed",
                        DatasetDocument.enabled == True,
                        DatasetDocument.archived == False,
                    )
                    .all()
                )

                documents = []
                segments_count = 0
                for dataset_document in dataset_documents:
                    segments = (
                        db.session.query(DocumentSegment)
                        .where(
                            DocumentSegment.document_id == dataset_document.id,
                            DocumentSegment.status == "completed",
                            DocumentSegment.enabled == True,
                        )
                        .all()
                    )

                    for segment in segments:
                        document = Document(
                            page_content=segment.content,
                            metadata={
                                "doc_id": segment.index_node_id,
                                "doc_hash": segment.index_node_hash,
                                "document_id": segment.document_id,
                                "dataset_id": segment.dataset_id,
                            },
                        )

                        documents.append(document)
                        segments_count = segments_count + 1

                if documents:
                    try:
                        click.echo(
                            click.style(
                                f"Creating vector index with {len(documents)} documents of {segments_count}"
                                f" segments for dataset {dataset.id}.",
                                fg="green",
                            )
                        )
                        vector.create(documents)
                        click.echo(click.style(f"Created vector index for dataset {dataset.id}.", fg="green"))
                    except Exception as e:
                        click.echo(click.style(f"Failed to created vector index for dataset {dataset.id}.", fg="red"))
                        raise e

                # Persist the updated index_struct only after re-indexing worked.
                db.session.add(dataset)
                db.session.commit()
                click.echo(f"Successfully migrated dataset {dataset.id}.")
                create_count += 1
            except Exception as e:
                # Undo the index_struct change for this dataset and continue.
                db.session.rollback()
                click.echo(click.style(f"Error creating dataset index: {e.__class__.__name__} {str(e)}", fg="red"))
                continue

    click.echo(
        click.style(
            f"Migration complete. Created {create_count} dataset indexes. Skipped {skipped_count} datasets.", fg="green"
        )
    )
@click.command("convert-to-agent-apps", help="Convert Agent Assistant to Agent App.")
def convert_to_agent_apps():
    """
    Convert legacy "chat" apps that use an agent strategy into AGENT_CHAT apps.

    Repeatedly fetches up to 1000 candidate apps via raw SQL, tracking already
    processed ids so each loop iteration only handles new ones; stops when a
    batch yields no new apps.
    """
    click.echo(click.style("Starting convert to agent apps.", fg="green"))

    # Ids handled in previous iterations, so the fixed LIMIT 1000 query
    # does not reprocess the same rows forever.
    proceeded_app_ids = []

    while True:
        # fetch first 1000 apps whose model config enables an agent strategy
        sql_query = """SELECT a.id AS id FROM apps a
    INNER JOIN app_model_configs am ON a.app_model_config_id=am.id
    WHERE a.mode = 'chat'
    AND am.agent_mode is not null
    AND (
        am.agent_mode like '%"strategy": "function_call"%'
        OR am.agent_mode like '%"strategy": "react"%'
    )
    AND (
        am.agent_mode like '{"enabled": true%'
        OR am.agent_mode like '{"max_iteration": %'
    ) ORDER BY a.created_at DESC LIMIT 1000
    """

        with db.engine.begin() as conn:
            rs = conn.execute(sa.text(sql_query))

            apps = []
            for i in rs:
                app_id = str(i.id)
                if app_id not in proceeded_app_ids:
                    proceeded_app_ids.append(app_id)
                    app = db.session.query(App).where(App.id == app_id).first()
                    if app is not None:
                        apps.append(app)

            # No new apps in this batch — conversion is done.
            if len(apps) == 0:
                break

        for app in apps:
            click.echo(f"Converting app: {app.id}")

            try:
                app.mode = AppMode.AGENT_CHAT.value
                db.session.commit()

                # update conversation mode to agent
                db.session.query(Conversation).where(Conversation.app_id == app.id).update(
                    {Conversation.mode: AppMode.AGENT_CHAT.value}
                )

                db.session.commit()
                click.echo(click.style(f"Converted app: {app.id}", fg="green"))
            except Exception as e:
                click.echo(click.style(f"Convert app error: {e.__class__.__name__} {str(e)}", fg="red"))

    click.echo(click.style(f"Conversion complete. Converted {len(proceeded_app_ids)} agent apps.", fg="green"))
@click.command("add-qdrant-index", help="Add Qdrant index.")
@click.option("--field", default="metadata.doc_id", prompt=False, help="Index field , default is metadata.doc_id.")
def add_qdrant_index(field: str):
    """
    Create a keyword payload index on *field* for every Qdrant collection
    referenced by a dataset collection binding.

    Missing collections (404) are reported and skipped; other per-collection
    errors are reported without aborting the run.
    """
    click.echo(click.style("Starting Qdrant index creation.", fg="green"))

    create_count = 0

    try:
        bindings = db.session.query(DatasetCollectionBinding).all()
        if not bindings:
            click.echo(click.style("No dataset collection bindings found.", fg="red"))
            return
        # Imported lazily so the command module loads even when the Qdrant
        # client library is not installed.
        import qdrant_client
        from qdrant_client.http.exceptions import UnexpectedResponse
        from qdrant_client.http.models import PayloadSchemaType

        from core.rag.datasource.vdb.qdrant.qdrant_vector import QdrantConfig

        for binding in bindings:
            if dify_config.QDRANT_URL is None:
                raise ValueError("Qdrant URL is required.")
            qdrant_config = QdrantConfig(
                endpoint=dify_config.QDRANT_URL,
                api_key=dify_config.QDRANT_API_KEY,
                root_path=current_app.root_path,
                timeout=dify_config.QDRANT_CLIENT_TIMEOUT,
                grpc_port=dify_config.QDRANT_GRPC_PORT,
                prefer_grpc=dify_config.QDRANT_GRPC_ENABLED,
            )
            try:
                client = qdrant_client.QdrantClient(**qdrant_config.to_qdrant_params())
                # create payload index
                client.create_payload_index(binding.collection_name, field, field_schema=PayloadSchemaType.KEYWORD)
                create_count += 1
            except UnexpectedResponse as e:
                # Collection does not exist, so return
                if e.status_code == 404:
                    click.echo(click.style(f"Collection not found: {binding.collection_name}.", fg="red"))
                    continue
                # Some other error occurred, so re-raise the exception
                else:
                    click.echo(
                        click.style(
                            f"Failed to create Qdrant index for collection: {binding.collection_name}.", fg="red"
                        )
                    )
    except Exception:
        click.echo(click.style("Failed to create Qdrant client.", fg="red"))

    click.echo(click.style(f"Index creation complete. Created {create_count} collection indexes.", fg="green"))
@click.command("old-metadata-migration", help="Old metadata migration.")
def old_metadata_migration():
    """
    Backfill DatasetMetadata / DatasetMetadataBinding rows from legacy
    per-document doc_metadata dicts.

    Built-in metadata fields are skipped; every other key gets (if missing)
    a string-typed DatasetMetadata row for its dataset and a binding row
    linking the document to it.
    """
    click.echo(click.style("Starting old metadata migration.", fg="green"))

    page = 1
    while True:
        try:
            stmt = (
                select(DatasetDocument)
                .where(DatasetDocument.doc_metadata.is_not(None))
                .order_by(DatasetDocument.created_at.desc())
            )
            documents = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False)
        except NotFound:
            break
        if not documents:
            break
        for document in documents:
            if document.doc_metadata:
                doc_metadata = document.doc_metadata
                for key, value in doc_metadata.items():
                    # for/else: the else branch runs only when no built-in
                    # field matched `key`, i.e. this is a custom metadata key.
                    for field in BuiltInField:
                        if field.value == key:
                            break
                    else:
                        dataset_metadata = (
                            db.session.query(DatasetMetadata)
                            .where(DatasetMetadata.dataset_id == document.dataset_id, DatasetMetadata.name == key)
                            .first()
                        )
                        if not dataset_metadata:
                            dataset_metadata = DatasetMetadata(
                                tenant_id=document.tenant_id,
                                dataset_id=document.dataset_id,
                                name=key,
                                type="string",
                                created_by=document.created_by,
                            )
                            db.session.add(dataset_metadata)
                            # Flush so dataset_metadata.id is populated before
                            # it is used in the binding below.
                            db.session.flush()
                            dataset_metadata_binding = DatasetMetadataBinding(
                                tenant_id=document.tenant_id,
                                dataset_id=document.dataset_id,
                                metadata_id=dataset_metadata.id,
                                document_id=document.id,
                                created_by=document.created_by,
                            )
                            db.session.add(dataset_metadata_binding)
                        else:
                            # Metadata row exists — only add a binding if this
                            # document is not yet linked to it.
                            dataset_metadata_binding = (
                                db.session.query(DatasetMetadataBinding)  # type: ignore
                                .where(
                                    DatasetMetadataBinding.dataset_id == document.dataset_id,
                                    DatasetMetadataBinding.document_id == document.id,
                                    DatasetMetadataBinding.metadata_id == dataset_metadata.id,
                                )
                                .first()
                            )
                            if not dataset_metadata_binding:
                                dataset_metadata_binding = DatasetMetadataBinding(
                                    tenant_id=document.tenant_id,
                                    dataset_id=document.dataset_id,
                                    metadata_id=dataset_metadata.id,
                                    document_id=document.id,
                                    created_by=document.created_by,
                                )
                                db.session.add(dataset_metadata_binding)
                db.session.commit()
        page += 1
    click.echo(click.style("Old metadata migration completed.", fg="green"))
  560. @click.command("create-tenant", help="Create account and tenant.")
  561. @click.option("--email", prompt=True, help="Tenant account email.")
  562. @click.option("--name", prompt=True, help="Workspace name.")
  563. @click.option("--language", prompt=True, help="Account language, default: en-US.")
  564. def create_tenant(email: str, language: Optional[str] = None, name: Optional[str] = None):
  565. """
  566. Create tenant account
  567. """
  568. if not email:
  569. click.echo(click.style("Email is required.", fg="red"))
  570. return
  571. # Create account
  572. email = email.strip()
  573. if "@" not in email:
  574. click.echo(click.style("Invalid email address.", fg="red"))
  575. return
  576. account_name = email.split("@")[0]
  577. if language not in languages:
  578. language = "en-US"
  579. # Validates name encoding for non-Latin characters.
  580. name = name.strip().encode("utf-8").decode("utf-8") if name else None
  581. # generate random password
  582. new_password = secrets.token_urlsafe(16)
  583. # register account
  584. account = RegisterService.register(
  585. email=email,
  586. name=account_name,
  587. password=new_password,
  588. language=language,
  589. create_workspace_required=False,
  590. )
  591. TenantService.create_owner_tenant_if_not_exist(account, name)
  592. click.echo(
  593. click.style(
  594. f"Account and tenant created.\nAccount: {email}\nPassword: {new_password}",
  595. fg="green",
  596. )
  597. )
  598. @click.command("upgrade-db", help="Upgrade the database")
  599. def upgrade_db():
  600. click.echo("Preparing database migration...")
  601. lock = redis_client.lock(name="db_upgrade_lock", timeout=60)
  602. if lock.acquire(blocking=False):
  603. try:
  604. click.echo(click.style("Starting database migration.", fg="green"))
  605. # run db migration
  606. import flask_migrate
  607. flask_migrate.upgrade()
  608. click.echo(click.style("Database migration successful!", fg="green"))
  609. except Exception:
  610. logging.exception("Failed to execute database migration")
  611. finally:
  612. lock.release()
  613. else:
  614. click.echo("Database migration skipped")
@click.command("fix-app-site-missing", help="Fix app related site missing issue.")
def fix_app_site_missing():
    """
    Re-create missing Site records for apps.

    Finds apps with no Site row via raw SQL and re-fires the
    `app_was_created` signal (whose handlers create the site) on behalf of
    the first account of the app's tenant. Apps that fail are remembered so
    they are not retried in later batches.
    """
    click.echo(click.style("Starting fix for missing app-related sites.", fg="green"))

    # Apps that failed once are skipped in subsequent loop iterations,
    # otherwise the same broken app would be fetched forever.
    failed_app_ids = []
    while True:
        sql = """select apps.id as id from apps left join sites on sites.app_id=apps.id
where sites.id is null limit 1000"""
        with db.engine.begin() as conn:
            rs = conn.execute(sa.text(sql))

            processed_count = 0
            for i in rs:
                processed_count += 1
                app_id = str(i.id)

                if app_id in failed_app_ids:
                    continue

                try:
                    app = db.session.query(App).where(App.id == app_id).first()
                    if not app:
                        print(f"App {app_id} not found")
                        continue

                    tenant = app.tenant
                    if tenant:
                        accounts = tenant.get_accounts()
                        if not accounts:
                            print(f"Fix failed for app {app.id}")
                            continue

                        account = accounts[0]
                        print(f"Fixing missing site for app {app.id}")
                        # Signal handlers create the missing Site record.
                        app_was_created.send(app, account=account)
                except Exception:
                    failed_app_ids.append(app_id)
                    click.echo(click.style(f"Failed to fix missing site for app {app_id}", fg="red"))
                    logging.exception("Failed to fix app related site missing issue, app_id: %s", app_id)
                    continue

            # No rows returned at all — every app has a site now.
            if not processed_count:
                break

    click.echo(click.style("Fix for missing app-related sites completed successfully!", fg="green"))
  655. @click.command("migrate-data-for-plugin", help="Migrate data for plugin.")
  656. def migrate_data_for_plugin():
  657. """
  658. Migrate data for plugin.
  659. """
  660. click.echo(click.style("Starting migrate data for plugin.", fg="white"))
  661. PluginDataMigration.migrate()
  662. click.echo(click.style("Migrate data for plugin completed.", fg="green"))
  663. @click.command("extract-plugins", help="Extract plugins.")
  664. @click.option("--output_file", prompt=True, help="The file to store the extracted plugins.", default="plugins.jsonl")
  665. @click.option("--workers", prompt=True, help="The number of workers to extract plugins.", default=10)
  666. def extract_plugins(output_file: str, workers: int):
  667. """
  668. Extract plugins.
  669. """
  670. click.echo(click.style("Starting extract plugins.", fg="white"))
  671. PluginMigration.extract_plugins(output_file, workers)
  672. click.echo(click.style("Extract plugins completed.", fg="green"))
  673. @click.command("extract-unique-identifiers", help="Extract unique identifiers.")
  674. @click.option(
  675. "--output_file",
  676. prompt=True,
  677. help="The file to store the extracted unique identifiers.",
  678. default="unique_identifiers.json",
  679. )
  680. @click.option(
  681. "--input_file", prompt=True, help="The file to store the extracted unique identifiers.", default="plugins.jsonl"
  682. )
  683. def extract_unique_plugins(output_file: str, input_file: str):
  684. """
  685. Extract unique plugins.
  686. """
  687. click.echo(click.style("Starting extract unique plugins.", fg="white"))
  688. PluginMigration.extract_unique_plugins_to_file(input_file, output_file)
  689. click.echo(click.style("Extract unique plugins completed.", fg="green"))
  690. @click.command("install-plugins", help="Install plugins.")
  691. @click.option(
  692. "--input_file", prompt=True, help="The file to store the extracted unique identifiers.", default="plugins.jsonl"
  693. )
  694. @click.option(
  695. "--output_file", prompt=True, help="The file to store the installed plugins.", default="installed_plugins.jsonl"
  696. )
  697. @click.option("--workers", prompt=True, help="The number of workers to install plugins.", default=100)
  698. def install_plugins(input_file: str, output_file: str, workers: int):
  699. """
  700. Install plugins.
  701. """
  702. click.echo(click.style("Starting install plugins.", fg="white"))
  703. PluginMigration.install_plugins(input_file, output_file, workers)
  704. click.echo(click.style("Install plugins completed.", fg="green"))
  705. @click.command("clear-free-plan-tenant-expired-logs", help="Clear free plan tenant expired logs.")
  706. @click.option("--days", prompt=True, help="The days to clear free plan tenant expired logs.", default=30)
  707. @click.option("--batch", prompt=True, help="The batch size to clear free plan tenant expired logs.", default=100)
  708. @click.option(
  709. "--tenant_ids",
  710. prompt=True,
  711. multiple=True,
  712. help="The tenant ids to clear free plan tenant expired logs.",
  713. )
  714. def clear_free_plan_tenant_expired_logs(days: int, batch: int, tenant_ids: list[str]):
  715. """
  716. Clear free plan tenant expired logs.
  717. """
  718. click.echo(click.style("Starting clear free plan tenant expired logs.", fg="white"))
  719. ClearFreePlanTenantExpiredLogs.process(days, batch, tenant_ids)
  720. click.echo(click.style("Clear free plan tenant expired logs completed.", fg="green"))
@click.option("-f", "--force", is_flag=True, help="Skip user confirmation and force the command to execute.")
@click.command("clear-orphaned-file-records", help="Clear orphaned file records.")
def clear_orphaned_file_records(force: bool):
    """
    Clear orphaned file records in the database.

    Two-phase, destructive cleanup:
      1. Delete ``message_files`` rows whose ``message_id`` no longer exists
         in ``messages``.
      2. Collect every file id owned by the tables in ``files_tables``
         (upload_files, tool_files), collect every id referenced anywhere in
         ``ids_tables`` (directly as a uuid column, as a UUID-looking
         substring of a text column, or inside a JSON column cast to text),
         and delete the file rows whose id is referenced nowhere.

    Prompts for confirmation before each delete unless ``--force`` is given.
    The regex scan relies on PostgreSQL's ``regexp_matches``.
    """
    # define tables and columns to process
    # files_tables: tables that own file records (deletion candidates).
    files_tables = [
        {"table": "upload_files", "id_column": "id", "key_column": "key"},
        {"table": "tool_files", "id_column": "id", "key_column": "file_key"},
    ]
    # ids_tables: every table/column that may hold a reference to a file id;
    # "type" selects the extraction strategy used below.
    ids_tables = [
        {"type": "uuid", "table": "message_files", "column": "upload_file_id"},
        {"type": "text", "table": "documents", "column": "data_source_info"},
        {"type": "text", "table": "document_segments", "column": "content"},
        {"type": "text", "table": "messages", "column": "answer"},
        {"type": "text", "table": "workflow_node_executions", "column": "inputs"},
        {"type": "text", "table": "workflow_node_executions", "column": "process_data"},
        {"type": "text", "table": "workflow_node_executions", "column": "outputs"},
        {"type": "text", "table": "conversations", "column": "introduction"},
        {"type": "text", "table": "conversations", "column": "system_instruction"},
        {"type": "text", "table": "accounts", "column": "avatar"},
        {"type": "text", "table": "apps", "column": "icon"},
        {"type": "text", "table": "sites", "column": "icon"},
        {"type": "json", "table": "messages", "column": "inputs"},
        {"type": "json", "table": "messages", "column": "message"},
    ]
    # notify user and ask for confirmation
    click.echo(
        click.style(
            "This command will first find and delete orphaned file records from the message_files table,", fg="yellow"
        )
    )
    click.echo(
        click.style(
            "and then it will find and delete orphaned file records in the following tables:",
            fg="yellow",
        )
    )
    for files_table in files_tables:
        click.echo(click.style(f"- {files_table['table']}", fg="yellow"))
    click.echo(
        click.style("The following tables and columns will be scanned to find orphaned file records:", fg="yellow")
    )
    for ids_table in ids_tables:
        click.echo(click.style(f"- {ids_table['table']} ({ids_table['column']})", fg="yellow"))
    click.echo("")
    click.echo(click.style("!!! USE WITH CAUTION !!!", fg="red"))
    click.echo(
        click.style(
            (
                "Since not all patterns have been fully tested, "
                "please note that this command may delete unintended file records."
            ),
            fg="yellow",
        )
    )
    click.echo(
        click.style("This cannot be undone. Please make sure to back up your database before proceeding.", fg="yellow")
    )
    click.echo(
        click.style(
            (
                "It is also recommended to run this during the maintenance window, "
                "as this may cause high load on your instance."
            ),
            fg="yellow",
        )
    )
    if not force:
        click.confirm("Do you want to proceed?", abort=True)
    # start the cleanup process
    click.echo(click.style("Starting orphaned file records cleanup.", fg="white"))
    # clean up the orphaned records in the message_files table where message_id doesn't exist in messages table
    try:
        click.echo(
            click.style("- Listing message_files records where message_id doesn't exist in messages table", fg="white")
        )
        query = (
            "SELECT mf.id, mf.message_id "
            "FROM message_files mf LEFT JOIN messages m ON mf.message_id = m.id "
            "WHERE m.id IS NULL"
        )
        orphaned_message_files = []
        with db.engine.begin() as conn:
            rs = conn.execute(sa.text(query))
            for i in rs:
                orphaned_message_files.append({"id": str(i[0]), "message_id": str(i[1])})
        if orphaned_message_files:
            click.echo(click.style(f"Found {len(orphaned_message_files)} orphaned message_files records:", fg="white"))
            for record in orphaned_message_files:
                click.echo(click.style(f" - id: {record['id']}, message_id: {record['message_id']}", fg="black"))
            if not force:
                click.confirm(
                    (
                        f"Do you want to proceed "
                        f"to delete all {len(orphaned_message_files)} orphaned message_files records?"
                    ),
                    abort=True,
                )
            click.echo(click.style("- Deleting orphaned message_files records", fg="white"))
            query = "DELETE FROM message_files WHERE id IN :ids"
            with db.engine.begin() as conn:
                # the tuple bind expands to an IN (...) list for the driver
                conn.execute(sa.text(query), {"ids": tuple([record["id"] for record in orphaned_message_files])})
            click.echo(
                click.style(f"Removed {len(orphaned_message_files)} orphaned message_files records.", fg="green")
            )
        else:
            click.echo(click.style("No orphaned message_files records found. There is nothing to delete.", fg="green"))
    except Exception as e:
        # best-effort: phase 1 failure is reported but does not stop phase 2
        click.echo(click.style(f"Error deleting orphaned message_files records: {str(e)}", fg="red"))
    # clean up the orphaned records in the rest of the *_files tables
    try:
        # fetch file id and keys from each table
        all_files_in_tables = []
        for files_table in files_tables:
            click.echo(click.style(f"- Listing file records in table {files_table['table']}", fg="white"))
            query = f"SELECT {files_table['id_column']}, {files_table['key_column']} FROM {files_table['table']}"
            with db.engine.begin() as conn:
                rs = conn.execute(sa.text(query))
                for i in rs:
                    all_files_in_tables.append({"table": files_table["table"], "id": str(i[0]), "key": i[1]})
        click.echo(click.style(f"Found {len(all_files_in_tables)} files in tables.", fg="white"))
        # fetch referred table and columns
        guid_regexp = "[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}"
        all_ids_in_tables = []
        for ids_table in ids_tables:
            query = ""
            if ids_table["type"] == "uuid":
                # column holds the file id directly
                click.echo(
                    click.style(
                        f"- Listing file ids in column {ids_table['column']} in table {ids_table['table']}", fg="white"
                    )
                )
                query = (
                    f"SELECT {ids_table['column']} FROM {ids_table['table']} WHERE {ids_table['column']} IS NOT NULL"
                )
                with db.engine.begin() as conn:
                    rs = conn.execute(sa.text(query))
                    for i in rs:
                        all_ids_in_tables.append({"table": ids_table["table"], "id": str(i[0])})
            elif ids_table["type"] == "text":
                # extract every UUID-looking substring from the text column
                click.echo(
                    click.style(
                        f"- Listing file-id-like strings in column {ids_table['column']} in table {ids_table['table']}",
                        fg="white",
                    )
                )
                query = (
                    f"SELECT regexp_matches({ids_table['column']}, '{guid_regexp}', 'g') AS extracted_id "
                    f"FROM {ids_table['table']}"
                )
                with db.engine.begin() as conn:
                    rs = conn.execute(sa.text(query))
                    for i in rs:
                        # regexp_matches returns an array per match row
                        for j in i[0]:
                            all_ids_in_tables.append({"table": ids_table["table"], "id": j})
            elif ids_table["type"] == "json":
                # same as "text" but the JSON column is cast to text first
                click.echo(
                    click.style(
                        (
                            f"- Listing file-id-like JSON string in column {ids_table['column']} "
                            f"in table {ids_table['table']}"
                        ),
                        fg="white",
                    )
                )
                query = (
                    f"SELECT regexp_matches({ids_table['column']}::text, '{guid_regexp}', 'g') AS extracted_id "
                    f"FROM {ids_table['table']}"
                )
                with db.engine.begin() as conn:
                    rs = conn.execute(sa.text(query))
                    for i in rs:
                        for j in i[0]:
                            all_ids_in_tables.append({"table": ids_table["table"], "id": j})
        click.echo(click.style(f"Found {len(all_ids_in_tables)} file ids in tables.", fg="white"))
    except Exception as e:
        # unlike phase 1, a failure here must abort: deleting with an
        # incomplete reference set would remove records that are still in use
        click.echo(click.style(f"Error fetching keys: {str(e)}", fg="red"))
        return
    # find orphaned files: owned ids minus referenced ids
    all_files = [file["id"] for file in all_files_in_tables]
    all_ids = [file["id"] for file in all_ids_in_tables]
    orphaned_files = list(set(all_files) - set(all_ids))
    if not orphaned_files:
        click.echo(click.style("No orphaned file records found. There is nothing to delete.", fg="green"))
        return
    click.echo(click.style(f"Found {len(orphaned_files)} orphaned file records.", fg="white"))
    for file in orphaned_files:
        click.echo(click.style(f"- orphaned file id: {file}", fg="black"))
    if not force:
        click.confirm(f"Do you want to proceed to delete all {len(orphaned_files)} orphaned file records?", abort=True)
    # delete orphaned records for each file
    try:
        for files_table in files_tables:
            click.echo(click.style(f"- Deleting orphaned file records in table {files_table['table']}", fg="white"))
            query = f"DELETE FROM {files_table['table']} WHERE {files_table['id_column']} IN :ids"
            with db.engine.begin() as conn:
                conn.execute(sa.text(query), {"ids": tuple(orphaned_files)})
    except Exception as e:
        click.echo(click.style(f"Error deleting orphaned file records: {str(e)}", fg="red"))
        return
    click.echo(click.style(f"Removed {len(orphaned_files)} orphaned file records.", fg="green"))
  924. @click.option("-f", "--force", is_flag=True, help="Skip user confirmation and force the command to execute.")
  925. @click.command("remove-orphaned-files-on-storage", help="Remove orphaned files on the storage.")
  926. def remove_orphaned_files_on_storage(force: bool):
  927. """
  928. Remove orphaned files on the storage.
  929. """
  930. # define tables and columns to process
  931. files_tables = [
  932. {"table": "upload_files", "key_column": "key"},
  933. {"table": "tool_files", "key_column": "file_key"},
  934. ]
  935. storage_paths = ["image_files", "tools", "upload_files"]
  936. # notify user and ask for confirmation
  937. click.echo(click.style("This command will find and remove orphaned files on the storage,", fg="yellow"))
  938. click.echo(
  939. click.style("by comparing the files on the storage with the records in the following tables:", fg="yellow")
  940. )
  941. for files_table in files_tables:
  942. click.echo(click.style(f"- {files_table['table']}", fg="yellow"))
  943. click.echo(click.style("The following paths on the storage will be scanned to find orphaned files:", fg="yellow"))
  944. for storage_path in storage_paths:
  945. click.echo(click.style(f"- {storage_path}", fg="yellow"))
  946. click.echo("")
  947. click.echo(click.style("!!! USE WITH CAUTION !!!", fg="red"))
  948. click.echo(
  949. click.style(
  950. "Currently, this command will work only for opendal based storage (STORAGE_TYPE=opendal).", fg="yellow"
  951. )
  952. )
  953. click.echo(
  954. click.style(
  955. "Since not all patterns have been fully tested, please note that this command may delete unintended files.",
  956. fg="yellow",
  957. )
  958. )
  959. click.echo(
  960. click.style("This cannot be undone. Please make sure to back up your storage before proceeding.", fg="yellow")
  961. )
  962. click.echo(
  963. click.style(
  964. (
  965. "It is also recommended to run this during the maintenance window, "
  966. "as this may cause high load on your instance."
  967. ),
  968. fg="yellow",
  969. )
  970. )
  971. if not force:
  972. click.confirm("Do you want to proceed?", abort=True)
  973. # start the cleanup process
  974. click.echo(click.style("Starting orphaned files cleanup.", fg="white"))
  975. # fetch file id and keys from each table
  976. all_files_in_tables = []
  977. try:
  978. for files_table in files_tables:
  979. click.echo(click.style(f"- Listing files from table {files_table['table']}", fg="white"))
  980. query = f"SELECT {files_table['key_column']} FROM {files_table['table']}"
  981. with db.engine.begin() as conn:
  982. rs = conn.execute(sa.text(query))
  983. for i in rs:
  984. all_files_in_tables.append(str(i[0]))
  985. click.echo(click.style(f"Found {len(all_files_in_tables)} files in tables.", fg="white"))
  986. except Exception as e:
  987. click.echo(click.style(f"Error fetching keys: {str(e)}", fg="red"))
  988. all_files_on_storage = []
  989. for storage_path in storage_paths:
  990. try:
  991. click.echo(click.style(f"- Scanning files on storage path {storage_path}", fg="white"))
  992. files = storage.scan(path=storage_path, files=True, directories=False)
  993. all_files_on_storage.extend(files)
  994. except FileNotFoundError as e:
  995. click.echo(click.style(f" -> Skipping path {storage_path} as it does not exist.", fg="yellow"))
  996. continue
  997. except Exception as e:
  998. click.echo(click.style(f" -> Error scanning files on storage path {storage_path}: {str(e)}", fg="red"))
  999. continue
  1000. click.echo(click.style(f"Found {len(all_files_on_storage)} files on storage.", fg="white"))
  1001. # find orphaned files
  1002. orphaned_files = list(set(all_files_on_storage) - set(all_files_in_tables))
  1003. if not orphaned_files:
  1004. click.echo(click.style("No orphaned files found. There is nothing to remove.", fg="green"))
  1005. return
  1006. click.echo(click.style(f"Found {len(orphaned_files)} orphaned files.", fg="white"))
  1007. for file in orphaned_files:
  1008. click.echo(click.style(f"- orphaned file: {file}", fg="black"))
  1009. if not force:
  1010. click.confirm(f"Do you want to proceed to remove all {len(orphaned_files)} orphaned files?", abort=True)
  1011. # delete orphaned files
  1012. removed_files = 0
  1013. error_files = 0
  1014. for file in orphaned_files:
  1015. try:
  1016. storage.delete(file)
  1017. removed_files += 1
  1018. click.echo(click.style(f"- Removing orphaned file: {file}", fg="white"))
  1019. except Exception as e:
  1020. error_files += 1
  1021. click.echo(click.style(f"- Error deleting orphaned file {file}: {str(e)}", fg="red"))
  1022. continue
  1023. if error_files == 0:
  1024. click.echo(click.style(f"Removed {removed_files} orphaned files without errors.", fg="green"))
  1025. else:
  1026. click.echo(click.style(f"Removed {removed_files} orphaned files, with {error_files} errors.", fg="yellow"))
  1027. @click.command("setup-system-tool-oauth-client", help="Setup system tool oauth client.")
  1028. @click.option("--provider", prompt=True, help="Provider name")
  1029. @click.option("--client-params", prompt=True, help="Client Params")
  1030. def setup_system_tool_oauth_client(provider, client_params):
  1031. """
  1032. Setup system tool oauth client
  1033. """
  1034. provider_id = ToolProviderID(provider)
  1035. provider_name = provider_id.provider_name
  1036. plugin_id = provider_id.plugin_id
  1037. try:
  1038. # json validate
  1039. click.echo(click.style(f"Validating client params: {client_params}", fg="yellow"))
  1040. client_params_dict = TypeAdapter(dict[str, Any]).validate_json(client_params)
  1041. click.echo(click.style("Client params validated successfully.", fg="green"))
  1042. click.echo(click.style(f"Encrypting client params: {client_params}", fg="yellow"))
  1043. click.echo(click.style(f"Using SECRET_KEY: `{dify_config.SECRET_KEY}`", fg="yellow"))
  1044. oauth_client_params = encrypt_system_oauth_params(client_params_dict)
  1045. click.echo(click.style("Client params encrypted successfully.", fg="green"))
  1046. except Exception as e:
  1047. click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red"))
  1048. return
  1049. deleted_count = (
  1050. db.session.query(ToolOAuthSystemClient)
  1051. .filter_by(
  1052. provider=provider_name,
  1053. plugin_id=plugin_id,
  1054. )
  1055. .delete()
  1056. )
  1057. if deleted_count > 0:
  1058. click.echo(click.style(f"Deleted {deleted_count} existing oauth client params.", fg="yellow"))
  1059. oauth_client = ToolOAuthSystemClient(
  1060. provider=provider_name,
  1061. plugin_id=plugin_id,
  1062. encrypted_oauth_params=oauth_client_params,
  1063. )
  1064. db.session.add(oauth_client)
  1065. db.session.commit()
  1066. click.echo(click.style(f"OAuth client params setup successfully. id: {oauth_client.id}", fg="green"))
  1067. @click.command("setup-datasource-oauth-client", help="Setup datasource oauth client.")
  1068. @click.option("--provider", prompt=True, help="Provider name")
  1069. @click.option("--client-params", prompt=True, help="Client Params")
  1070. def setup_datasource_oauth_client(provider, client_params):
  1071. """
  1072. Setup datasource oauth client
  1073. """
  1074. provider_id = DatasourceProviderID(provider)
  1075. provider_name = provider_id.provider_name
  1076. plugin_id = provider_id.plugin_id
  1077. try:
  1078. # json validate
  1079. click.echo(click.style(f"Validating client params: {client_params}", fg="yellow"))
  1080. client_params_dict = TypeAdapter(dict[str, Any]).validate_json(client_params)
  1081. click.echo(click.style("Client params validated successfully.", fg="green"))
  1082. except Exception as e:
  1083. click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red"))
  1084. return
  1085. click.echo(click.style(f"Ready to delete existing oauth client params: {provider_name}", fg="yellow"))
  1086. deleted_count = (
  1087. db.session.query(DatasourceOauthParamConfig)
  1088. .filter_by(
  1089. provider=provider_name,
  1090. plugin_id=plugin_id,
  1091. )
  1092. .delete()
  1093. )
  1094. if deleted_count > 0:
  1095. click.echo(click.style(f"Deleted {deleted_count} existing oauth client params.", fg="yellow"))
  1096. click.echo(click.style(f"Ready to setup datasource oauth client: {provider_name}", fg="yellow"))
  1097. oauth_client = DatasourceOauthParamConfig(
  1098. provider=provider_name,
  1099. plugin_id=plugin_id,
  1100. system_credentials=client_params_dict,
  1101. )
  1102. db.session.add(oauth_client)
  1103. db.session.commit()
  1104. click.echo(click.style(f"provider: {provider_name}", fg="green"))
  1105. click.echo(click.style(f"plugin_id: {plugin_id}", fg="green"))
  1106. click.echo(click.style(f"params: {json.dumps(client_params_dict, indent=2, ensure_ascii=False)}", fg="green"))
  1107. click.echo(click.style(f"Datasource oauth client setup successfully. id: {oauth_client.id}", fg="green"))
@click.command("transform-datasource-credentials", help="Transform datasource credentials.")
def transform_datasource_credentials():
    """
    Transform datasource credentials.

    One-off migration that copies legacy datasource credentials into the
    plugin-based DatasourceProvider table:
      - Notion:     DataSourceOauthBinding rows  -> OAuth2-type providers
      - Firecrawl:  DataSourceApiKeyAuthBinding  -> API-key-type providers
      - Jina:       DataSourceApiKeyAuthBinding  -> API-key-type providers
    For each tenant that has legacy credentials, the matching datasource
    plugin is installed from the marketplace if it is not already present.
    Any failure aborts the remaining work and prints the error.
    """
    try:
        installer_manager = PluginInstaller()
        plugin_migration = PluginMigration()
        notion_plugin_id = "langgenius/notion_datasource"
        firecrawl_plugin_id = "langgenius/firecrawl_datasource"
        jina_plugin_id = "langgenius/jina_datasource"
        # marketplace identifiers used below to install missing plugins
        notion_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(notion_plugin_id)
        firecrawl_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(firecrawl_plugin_id)
        jina_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(jina_plugin_id)
        oauth_credential_type = CredentialType.OAUTH2
        api_key_credential_type = CredentialType.API_KEY
        # deal notion credentials
        deal_notion_count = 0
        notion_credentials = db.session.query(DataSourceOauthBinding).filter_by(provider="notion").all()
        # group legacy rows by tenant so plugin installation happens once per tenant
        notion_credentials_tenant_mapping: dict[str, list[DataSourceOauthBinding]] = {}
        for credential in notion_credentials:
            tenant_id = credential.tenant_id
            if tenant_id not in notion_credentials_tenant_mapping:
                notion_credentials_tenant_mapping[tenant_id] = []
            notion_credentials_tenant_mapping[tenant_id].append(credential)
        for tenant_id, credentials in notion_credentials_tenant_mapping.items():
            # check notion plugin is installed
            installed_plugins = installer_manager.list_plugins(tenant_id)
            installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
            if notion_plugin_id not in installed_plugins_ids:
                if notion_plugin_unique_identifier:
                    # install notion plugin
                    installer_manager.install_from_identifiers(
                        tenant_id,
                        [notion_plugin_unique_identifier],
                        PluginInstallationSource.Marketplace,
                        metas=[
                            {
                                "plugin_unique_identifier": notion_plugin_unique_identifier,
                            }
                        ],
                    )
            # auth_count numbers each tenant's credentials: "Auth 1", "Auth 2", ...
            auth_count = 0
            for credential in credentials:
                auth_count += 1
                # get credential oauth params
                access_token = credential.access_token
                # notion info
                notion_info = credential.source_info
                workspace_id = notion_info.get("workspace_id")
                workspace_name = notion_info.get("workspace_name")
                workspace_icon = notion_info.get("workspace_icon")
                # NOTE(review): only the access token is encrypted here; the
                # api-key sections below store keys as-is — confirm whether
                # DatasourceProvider encrypts credentials elsewhere.
                new_credentials = {
                    "integration_secret": encrypter.encrypt_token(tenant_id, access_token),
                    "workspace_id": workspace_id,
                    "workspace_name": workspace_name,
                    "workspace_icon": workspace_icon,
                }
                datasource_provider = DatasourceProvider(
                    provider="notion",
                    tenant_id=tenant_id,
                    plugin_id=notion_plugin_id,
                    auth_type=oauth_credential_type.value,
                    encrypted_credentials=new_credentials,
                    name=f"Auth {auth_count}",
                    avatar_url=workspace_icon or "default",
                    is_default=False,
                )
                db.session.add(datasource_provider)
                deal_notion_count += 1
        # NOTE(review): commit placed once per provider section; indentation in
        # the source was ambiguous — confirm the intended transaction granularity.
        db.session.commit()
        # deal firecrawl credentials
        deal_firecrawl_count = 0
        firecrawl_credentials = db.session.query(DataSourceApiKeyAuthBinding).filter_by(provider="firecrawl").all()
        firecrawl_credentials_tenant_mapping: dict[str, list[DataSourceApiKeyAuthBinding]] = {}
        for credential in firecrawl_credentials:
            tenant_id = credential.tenant_id
            if tenant_id not in firecrawl_credentials_tenant_mapping:
                firecrawl_credentials_tenant_mapping[tenant_id] = []
            firecrawl_credentials_tenant_mapping[tenant_id].append(credential)
        for tenant_id, credentials in firecrawl_credentials_tenant_mapping.items():
            # check firecrawl plugin is installed
            installed_plugins = installer_manager.list_plugins(tenant_id)
            installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
            if firecrawl_plugin_id not in installed_plugins_ids:
                if firecrawl_plugin_unique_identifier:
                    # install firecrawl plugin
                    installer_manager.install_from_identifiers(
                        tenant_id,
                        [firecrawl_plugin_unique_identifier],
                        PluginInstallationSource.Marketplace,
                        metas=[
                            {
                                "plugin_unique_identifier": firecrawl_plugin_unique_identifier,
                            }
                        ],
                    )
            auth_count = 0
            for credential in credentials:
                auth_count += 1
                # get credential api key
                api_key = credential.credentials.get("config", {}).get("api_key")
                base_url = credential.credentials.get("config", {}).get("base_url")
                new_credentials = {
                    "firecrawl_api_key": api_key,
                    "base_url": base_url,
                }
                datasource_provider = DatasourceProvider(
                    provider="firecrawl",
                    tenant_id=tenant_id,
                    plugin_id=firecrawl_plugin_id,
                    auth_type=api_key_credential_type.value,
                    encrypted_credentials=new_credentials,
                    name=f"Auth {auth_count}",
                    avatar_url="default",
                    is_default=False,
                )
                db.session.add(datasource_provider)
                deal_firecrawl_count += 1
        db.session.commit()
        # deal jina credentials
        deal_jina_count = 0
        jina_credentials = db.session.query(DataSourceApiKeyAuthBinding).filter_by(provider="jina").all()
        jina_credentials_tenant_mapping: dict[str, list[DataSourceApiKeyAuthBinding]] = {}
        for credential in jina_credentials:
            tenant_id = credential.tenant_id
            if tenant_id not in jina_credentials_tenant_mapping:
                jina_credentials_tenant_mapping[tenant_id] = []
            jina_credentials_tenant_mapping[tenant_id].append(credential)
        for tenant_id, credentials in jina_credentials_tenant_mapping.items():
            # check jina plugin is installed
            installed_plugins = installer_manager.list_plugins(tenant_id)
            installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
            if jina_plugin_id not in installed_plugins_ids:
                if jina_plugin_unique_identifier:
                    # install jina plugin
                    installer_manager.install_from_identifiers(
                        tenant_id,
                        [jina_plugin_unique_identifier],
                        PluginInstallationSource.Marketplace,
                        metas=[
                            {
                                "plugin_unique_identifier": jina_plugin_unique_identifier,
                            }
                        ],
                    )
            auth_count = 0
            for credential in credentials:
                auth_count += 1
                # get credential api key
                api_key = credential.credentials.get("config", {}).get("api_key")
                new_credentials = {
                    "integration_secret": api_key,
                }
                datasource_provider = DatasourceProvider(
                    provider="jina",
                    tenant_id=tenant_id,
                    plugin_id=jina_plugin_id,
                    auth_type=api_key_credential_type.value,
                    encrypted_credentials=new_credentials,
                    name=f"Auth {auth_count}",
                    avatar_url="default",
                    is_default=False,
                )
                db.session.add(datasource_provider)
                deal_jina_count += 1
        db.session.commit()
    except Exception as e:
        # NOTE(review): message says "client params" but any migration failure
        # lands here; the wording looks copy-pasted from the oauth commands.
        click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red"))
        return
    click.echo(click.style(f"Transforming notion successfully. deal_notion_count: {deal_notion_count}", fg="green"))
    click.echo(
        click.style(f"Transforming firecrawl successfully. deal_firecrawl_count: {deal_firecrawl_count}", fg="green")
    )
    click.echo(click.style(f"Transforming jina successfully. deal_jina_count: {deal_jina_count}", fg="green"))