| @@ -157,7 +157,7 @@ class PipelineGenerator(BaseAppGenerator): | |||
| stream=streaming, | |||
| invoke_from=invoke_from, | |||
| call_depth=call_depth, | |||
| workflow_run_id=workflow_run_id, | |||
| workflow_execution_id=workflow_run_id, | |||
| ) | |||
| contexts.plugin_tool_providers.set({}) | |||
| @@ -379,7 +379,7 @@ class PipelineGenerator(BaseAppGenerator): | |||
| stream=streaming, | |||
| invoke_from=InvokeFrom.DEBUGGER, | |||
| call_depth=0, | |||
| workflow_run_id=str(uuid.uuid4()), | |||
| workflow_execution_id=str(uuid.uuid4()), | |||
| ) | |||
| contexts.plugin_tool_providers.set({}) | |||
| contexts.plugin_tool_providers_lock.set(threading.Lock()) | |||
| @@ -461,7 +461,7 @@ class PipelineGenerator(BaseAppGenerator): | |||
| invoke_from=InvokeFrom.DEBUGGER, | |||
| extras={"auto_generate_conversation_name": False}, | |||
| single_loop_run=RagPipelineGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args["inputs"]), | |||
| workflow_run_id=str(uuid.uuid4()), | |||
| workflow_execution_id=str(uuid.uuid4()), | |||
| ) | |||
| contexts.plugin_tool_providers.set({}) | |||
| contexts.plugin_tool_providers_lock.set(threading.Lock()) | |||
| @@ -29,6 +29,7 @@ class PipelineDataset(BaseModel): | |||
| class PipelineDocument(BaseModel): | |||
| id: str | |||
| position: int | |||
| data_source_type: str | |||
| data_source_info: Optional[dict] = None | |||
| name: str | |||
| indexing_status: str | |||
| @@ -0,0 +1,35 @@ | |||
| """add_pipeline_info_5 | |||
| Revision ID: d466c551816f | |||
| Revises: e4fb49a4fe86 | |||
| Create Date: 2025-06-05 13:56:05.962215 | |||
| """ | |||
| from alembic import op | |||
| import models as models | |||
| import sqlalchemy as sa | |||
| # revision identifiers, used by Alembic. | |||
| revision = 'd466c551816f' | |||
| down_revision = 'e4fb49a4fe86' | |||
| branch_labels = None | |||
| depends_on = None | |||
def upgrade():
    """Replace the unique (plugin_id, provider) constraint on
    datasource_providers with a non-unique composite index that also
    covers tenant_id, allowing multiple credential rows per provider.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch:
        # Remove the old uniqueness guarantee on (plugin_id, provider) ...
        batch.drop_constraint(
            batch.f('datasource_provider_plugin_id_provider_idx'),
            type_='unique',
        )
        # ... and add a plain tenant-scoped lookup index in its place.
        batch.create_index(
            'datasource_provider_auth_type_provider_idx',
            ['tenant_id', 'plugin_id', 'provider'],
            unique=False,
        )
    # ### end Alembic commands ###
def downgrade():
    """Revert the index change: drop the tenant-scoped lookup index and
    restore the original unique constraint on (plugin_id, provider).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('datasource_providers', schema=None) as batch:
        # Drop the non-unique composite index introduced by upgrade() ...
        batch.drop_index('datasource_provider_auth_type_provider_idx')
        # ... and re-create the original unique constraint.
        batch.create_unique_constraint(
            batch.f('datasource_provider_plugin_id_provider_idx'),
            ['plugin_id', 'provider'],
        )
    # ### end Alembic commands ###
| @@ -25,7 +25,7 @@ class DatasourceProvider(Base): | |||
| __tablename__ = "datasource_providers" | |||
| __table_args__ = ( | |||
| db.PrimaryKeyConstraint("id", name="datasource_provider_pkey"), | |||
| db.UniqueConstraint("plugin_id", "provider", name="datasource_provider_auth_type_provider_idx"), | |||
| db.Index("datasource_provider_auth_type_provider_idx", "tenant_id", "plugin_id", "provider"), | |||
| ) | |||
| id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) | |||
| tenant_id = db.Column(StringUUID, nullable=False) | |||
| @@ -104,7 +104,7 @@ class DatasourceProviderService: | |||
| for datasource_provider in datasource_providers: | |||
| encrypted_credentials = datasource_provider.encrypted_credentials | |||
| # Get provider credential secret variables | |||
| credential_secret_variables = self.extract_secret_variables(tenant_id=tenant_id, provider_id=provider) | |||
| credential_secret_variables = self.extract_secret_variables(tenant_id=tenant_id, provider_id=f"{plugin_id}/{provider}") | |||
| # Obfuscate provider credentials | |||
| copy_credentials = encrypted_credentials.copy() | |||