From 08f61d83508107cc294eb6bfb1477e14be12d465 Mon Sep 17 00:00:00 2001
From: vizsatiz
Date: Thu, 4 Dec 2025 13:46:38 +0530
Subject: [PATCH 01/13] First commit for wavefront backend

---
 .../call_processing/__init__.py | 0
 .../call_processing/cache/cache_manager.py | 206 +
 .../call_processing/cache/cache_utils.py | 28 +
 .../call_processing/config.ini | 6 +
 .../call_processing/constants/__init__.py | 1 +
 .../constants/api_endpoints.py | 20 +
 .../call_processing/constants/auth.py | 10 +
 .../controllers/cache_controller.py | 211 +
 .../controllers/webhook_controller.py | 186 +
 .../di/application_container.py | 31 +
 .../call_processing/log/logger.py | 42 +
 .../call_processing/call_processing/server.py | 117 +
 .../services/floware_http_client.py | 125 +
 .../call_processing/services/llm_service.py | 146 +
 .../services/pipecat_service.py | 111 +
 .../call_processing/services/stt_service.py | 119 +
 .../call_processing/services/tts_service.py | 166 +
 .../services/voice_agent_cache_service.py | 264 +
 .../apps/call_processing/pyproject.toml | 34 +
 .../apps/floconsole/floconsole/__init__.py | 0
 .../floconsole/authorization/require_auth.py | 136 +
 .../apps/floconsole/floconsole/config.ini | 22 +
 .../floconsole/floconsole/constants/app.py | 12 +
 .../floconsole/floconsole/constants/auth.py | 10 +
 .../floconsole/controllers/__init__.py | 1 +
 .../floconsole/controllers/app_controller.py | 394 +
 .../floconsole/controllers/auth_controller.py | 122 +
 .../controllers/floware_proxy_controller.py | 171 +
 .../floconsole/controllers/user_controller.py | 97 +
 .../apps/floconsole/floconsole/db/__init__.py | 16 +
 .../apps/floconsole/floconsole/db/alembic.ini | 82 +
 .../floconsole/floconsole/db/alembic/env.py | 87 +
 .../floconsole/db/alembic/script.py.mako | 24 +
 ..._18_1543-73bcd253dd62_create_user_table.py | 40 +
 ..._1543-e3a2fa91cda2_create_session_table.py | 40 +
 ...5_08_18_1554-521ae4960bcf_add_seed_user.py | 73 +
 ...8_21_1251-ac10dc573599_create_app_table.py | 42 +
 ..._14_1514-e9a4691e0732_update_apps_table.py | 41 +
 ...3ba0ace_adding_type_column_to_app_table.py | 40 +
 .../apps/floconsole/floconsole/db/base.py | 3 +
 .../floconsole/floconsole/db/connection.py | 45 +
 .../floconsole/db/models/__init__.py | 7 +
 .../floconsole/floconsole/db/models/app.py | 42 +
 .../floconsole/db/models/session.py | 28 +
 .../floconsole/floconsole/db/models/user.py | 40 +
 .../floconsole/db/repositories/__init__.py | 1 +
 .../db/repositories/sql_alchemy_repository.py | 228 +
 .../floconsole/di/application_container.py | 82 +
 .../apps/floconsole/floconsole/server.py | 170 +
 .../floconsole/services/app_service.py | 87 +
 .../services/floware_proxy_service.py | 210 +
 .../floconsole/services/token_service.py | 158 +
 .../floconsole/utils/password_utils.py | 13 +
 .../floconsole/floconsole/utils/user_utils.py | 11 +
 .../server/apps/floconsole/pyproject.toml | 37 +
 .../server/apps/floware/floware/__init__.py | 0
 .../server/apps/floware/floware/channels.py | 49 +
 .../server/apps/floware/floware/config.ini | 173 +
 .../floware/floware/controllers/__init__.py | 0
 .../floware/controllers/config_controller.py | 99 +
 .../controllers/notification_controller.py | 74 +
 .../floware/floware/decorators/with_lock.py | 41 +
 .../floware/di/application_container.py | 41 +
 .../floware/floware/middleware/__init__.py | 7 +
 .../floware/middleware/security_headers.py | 177 +
 .../server/apps/floware/floware/server.py | 579 ++
 .../floware/services/config_service.py | 81 +
 .../floware/services/notification_service.py | 31 +
 .../floware/floware/utils/network_utils.py | 7 +
 .../server/apps/floware/floware/utils/yaml.py | 10 +
 wavefront/server/apps/floware/pyproject.toml | 60 +
 .../server/apps/floware/tests/conftest.py | 285 +
 .../floware/tests/test_config_controller.py | 120 +
 .../inference_app/inference_app/__init__.py | 0
 .../inference_app/inference_app/config.ini | 8 +
 .../inference_app/controllers/__init__.py | 0
 .../controllers/inference_controller.py | 187 +
 .../inference_app/inference_app_container.py | 31 +
 .../inference_app/inference_app/server.py | 120 +
 .../inference_app/service/__init__.py | 0
 .../inference_app/service/image_analyser.py | 22 +
 .../inference_app/service/image_embedding.py | 71 +
 .../inference_app/service/model_inference.py | 99 +
 .../inference_app/service/model_repository.py | 68 +
 .../inference_app/utils/__init__.py | 0
 .../inference_app/utils/image_utils.py | 34 +
 .../server/apps/inference_app/pyproject.toml | 36 +
 .../rag_ingestion/pyproject.toml | 34 +
 .../rag_ingestion/rag_ingestion/__init__.py | 0
 .../rag_ingestion/constants/__init__.py | 0
 .../rag_ingestion/constants/auth.py | 10 +
 .../rag_ingestion/embeddings/__init__.py | 0
 .../rag_ingestion/embeddings/embed.py | 44 +
 .../rag_ingestion/embeddings/image_embed.py | 71 +
 .../rag_ingestion/rag_ingestion/env.py | 11 +
 .../rag_ingestion/rag_ingestion/main.py | 42 +
 .../rag_ingestion/models/__init__.py | 0
 .../rag_ingestion/models/doc_content.py | 10 +
 .../models/knowledge_base_embeddings.py | 19 +
 .../rag_ingestion/models/rag_message.py | 15 +
 .../rag_ingestion/processors/__init__.py | 0
 .../processors/file_processor.py | 48 +
 .../processors/kb_storage_processor.py | 148 +
 .../rag_ingestion/service/__init__.py | 0
 .../rag_ingestion/service/kb_rag_storage.py | 431 ++
 .../rag_ingestion/stream/__init__.py | 0
 .../rag_ingestion/stream/queue_message.py | 42 +
 .../rag_ingestion/stream/rag_streamer.py | 38 +
 .../workflow_job/pyproject.toml | 28 +
 .../workflow_job/workflow_job/__init__.py | 0
 .../workflow_job/workflow_job/config.ini | 26 +
 .../workflow_job/constants/__init__.py | 1 +
 .../workflow_job/constants/auth.py | 10 +
 .../workflow_job/workflow_job/main.py | 103 +
 .../workflow_job/workflow_job/models.py | 7 +
 .../workflow_job/workflow_listener.py | 19 +
 .../workflow_job/workflow_processor.py | 181 +
 wavefront/server/docker-compose.yml | 34 +
 .../agents_module/agents_container.py | 77 +
 .../controllers/agent_controller.py | 439 ++
 .../controllers/namespace_controller.py | 43 +
 .../controllers/workflow_controller.py | 673 ++
 .../workflow_pipeline_controller.py | 323 +
 .../controllers/workflow_runs.py | 199 +
 .../agents_module/models/agent_schemas.py | 80 +
 .../agents_module/models/workflow_schemas.py | 106 +
 .../services/agent_crud_service.py | 461 ++
 .../services/agent_inference_service.py | 255 +
 .../services/namespace_service.py | 176 +
 .../services/workflow_crud_service.py | 516 ++
 .../agents_module/services/workflow_events.py | 136 +
 .../services/workflow_inference_service.py | 388 +
 .../agents_module/utils/agent_utils.py | 32 +
 .../agents_module/utils/auth_utils.py | 26 +
 .../agents_module/utils/cache_utils.py | 46 +
 .../utils/input_processing_utils.py | 151 +
 .../agents_module/utils/validation_utils.py | 28 +
 .../agents_module/utils/workflow_utils.py | 48 +
 .../modules/agents_module/pyproject.toml | 37 +
 .../tests/test_input_processing_utils.py | 440 ++
 .../modules/api_services_module/README.md | 361 +
 .../api_services_module/__init__.py | 44 +
 .../api_services_container.py | 144 +
 .../api_services_module/auth/__init__.py | 1 +
 .../api_services_module/auth/handlers.py | 120 +
 .../api_services_module/auth/manager.py | 62 +
 .../api_services_module/config/__init__.py | 1 +
 .../api_services_module/config/parser.py | 154 +
 .../api_services_module/config/registry.py | 170 +
 .../api_services_module/core/__init__.py | 1 +
 .../api_services_module/core/manager.py | 101 +
 .../api_services_module/core/proxy.py | 385 +
 .../api_services_module/core/router.py | 478 ++
 .../api_services_module/env.py | 3 +
 .../api_services_module/execution/execute.py | 81 +
 .../api_services_module/models/__init__.py | 1 +
 .../api_services_module/models/pipeline.py | 184 +
 .../api_services_module/models/service.py | 122 +
 .../api_services_module/pipeline/__init__.py | 1 +
 .../api_services_module/pipeline/builder.py | 154 +
 .../api_services_module/pipeline/stages.py | 447 ++
 .../utils/api_change_processor.py | 38 +
 .../utils/api_change_publisher.py | 24 +
 .../api_services_module/pyproject.toml | 36 +
 .../api_services_module/tests/__init__.py | 1 +
 .../api_services_module/tests/conftest.py | 724 ++
 .../api_services_module/tests/test_example.py | 341 +
 .../test_integration_with_mock_backend.py | 765 ++
 .../tests/test_service_deletion.py | 175 +
 .../tests/test_simple_integration.py | 581 ++
 .../auth_module/auth_module/auth_container.py | 88 +
 .../controllers/hmac_controller.py | 48 +
 .../controllers/outlook_controller.py | 225 +
 .../controllers/superset_controller.py | 94 +
 .../services/client_token_service.py | 44 +
 .../auth_module/services/outlook_service.py | 343 +
 .../auth_module/services/superset_service.py | 132 +
 .../auth_module/services/token_service.py | 139 +
 .../server/modules/auth_module/pyproject.toml | 40 +
 .../modules/auth_module/tests/conftest.py | 179 +
 .../auth_module/tests/data/idp_metadata.xml | 36 +
 .../modules/auth_module/tests/data/key_gen.sh | 4 +
 .../tests/data/test_private_key.pem | 51 +
 .../tests/data/test_public_key.pem | 13 +
 .../auth_module/tests/fixtures/__init__.py | 0
 .../tests/fixtures/keys/private_key.pem | 51 +
 .../tests/fixtures/keys/public_key.pem | 13 +
 .../auth_module/tests/fixtures/test_keys.py | 20 +
 .../tests/test_superset_controller.py | 305 +
 .../common_module/common_cache.py | 23 +
 .../common_module/common_container.py | 26 +
 .../common_module/feature/feature_flag.py | 31 +
 .../common_module/common_module/log/logger.py | 48 +
 .../common_module/middleware/__init__.py | 3 +
 .../middleware/request_id_middleware.py | 73 +
 .../common_module/models/response.py | 34 +
 .../prometheus/prometheus_middleware.py | 120 +
 .../common_module/response_formatter.py | 16 +
 .../common_module/common_module/scheduler.py | 157 +
 .../common_module/security/__init__.py | 5 +
 .../common_module/security/bearer_auth.py | 32 +
 .../common_module/utils/odata_parser.py | 120 +
 .../common_module/utils/serializer.py | 33 +
 .../modules/common_module/pyproject.toml | 35 +
 .../modules/common_module/tests/conftest.py | 59 +
 .../common_module/tests/test_odata_parser.py | 184 +
 .../tests/test_request_id_middleware.py | 243 +
 .../db_repo_module/db_repo_module/alembic.ini | 82 +
 .../db_repo_module/alembic/env.py | 147 +
 .../db_repo_module/alembic/script.py.mako | 26 +
 ...b7ce8e5b03_create_a_baseline_migrations.py | 138 +
 ...7c4ba1a32fe_initializing_the_role_table.py | 98 +
 ...1030-756caddfb44b_truncating_role_table.py | 58 +
 ...12-01a4c5202566_hash_existing_passwords.py | 45 +
 ...33-c7800bd1d9c3_for_actionable_insights.py | 49 +
 ...create_signal_name_in_actionable_alerts.py | 28 +
 ...d46_remove_all_wrongly_generated_alerts.py | 43 +
 ...8_1602-78655faf6488_adding_notification.py | 56 +
 ...025_03_25_1855-9b10292a95eb_rbac_tables.py | 238 +
 ..._1715-36703628c7a6_adding_cacade_delete.py | 93 +
 ..._created_actionable_insight_query_table.py | 54 +
 ..._actionable_alerts_and_query_migrations.py | 79 +
 ...fba41ef64_updated_knowledge_base_tables.py | 99 +
 ...29_1345-053823285206_create_leads_table.py | 48 +
 ...ca43b31d_renaming_column_in_leads_table.py | 33 +
 ...13558d60_created_the_kb_inference_table.py | 51 +
 ...e_updated_the_knowledge_base_embeddings.py | 27 +
 ...25_05_24_1725-ba1f66ca0228_user_session.py | 48 +
 ...25_05_25_2103-0da695688814_cascade_rbac.py | 72 +
 ..._updated_the_knowledge_base_tables_for_.py | 32 +
 ...936-827b9d399023_add_auth_secrets_table.py | 42 +
 ...418-0db19a0af2af_added_datasource_table.py | 46 +
 ...1b2c3d4e5f7_create_authenticators_table.py | 95 +
 ...faa4a3665_create_product_analysis_table.py | 47 +
 ...dd_account_lockout_fields_to_user_table.py | 45 +
 ...3_1328-d5caffc321f2_create_config_table.py | 39 +
 ...b1e6d56_add_last_login_at_to_user_table.py | 29 +
 ...6e5c42780_created_model_inference_table.py | 46 +
 ...3a87a_create_llm_inference_config_table.py | 49 +
 ...3_1230-bb3907e50d30_create_config_table.py | 43 +
 ...bf901c107c8d_create_image_search_tables.py | 80 +
 ...10_21_1535-d54e5612306e_workflow_tables.py | 69 +
 ...dd_new_columns_in_knowledge_base_tables.py | 36 +
 ...22ec0134dcf8_create_voice_agents_tables.py | 146 +
 ...pdate_the_metadata_column_in_knowledge_.py | 29 +
 ..._add_parameters_to_llm_inference_config.py | 91 +
 ...c85_create_agents_and_namespaces_tables.py | 125 +
 ...f9dfcda24fb_add_message_processor_table.py | 50 +
 ...025_11_23_1015-a9a5d624020c_api_service.py | 45 +
 ...d6_added_config_id_column_in_inference_.py | 42 +
 ...a0_drop_actionable_alert_insight_leads_.py | 104 +
 .../db_repo_module/cache/cache_manager.py | 248 +
 .../db_repo_module/database/base.py | 3 +
 .../db_repo_module/database/connection.py | 84 +
 .../db_repo_module/db_repo_container.py | 223 +
 .../db_repo_module/models/agent.py | 39 +
 .../db_repo_module/models/api_services.py | 33 +
 .../db_repo_module/models/auth_secrets.py | 35 +
 .../db_repo_module/models/authenticator.py | 35 +
 .../db_repo_module/models/config.py | 25 +
 .../db_repo_module/models/datasource.py | 47 +
 .../db_repo_module/models/documents.py | 14 +
 .../models/dynamic_query_yaml.py | 33 +
 .../db_repo_module/models/email.py | 16 +
 .../db_repo_module/models/ikb_models.py | 40 +
 .../models/image_search_models.py | 64 +
 .../db_repo_module/models/kb_inferences.py | 42 +
 .../models/knowledge_base_documents.py | 44 +
 .../models/knowledge_base_embeddings.py | 39 +
 .../db_repo_module/models/knowledge_bases.py | 37 +
 .../models/llm_inference_config.py | 47 +
 .../models/message_processors.py | 46 +
 .../db_repo_module/models/model_schema.py | 34 +
 .../db_repo_module/models/namespace.py | 27 +
 .../models/notification_users.py | 24 +
 .../db_repo_module/models/notifications.py | 28 +
 .../db_repo_module/models/oauth_credential.py | 22 +
 .../models/product_analytics.py | 42 +
 .../db_repo_module/models/resource.py | 54 +
 .../db_repo_module/models/role.py | 26 +
 .../db_repo_module/models/role_resource.py | 16 +
 .../db_repo_module/models/saml_config.py | 37 +
 .../db_repo_module/models/session.py | 29 +
 .../db_repo_module/models/stt_config.py | 56 +
 .../db_repo_module/models/task.py | 19 +
 .../db_repo_module/models/team.py | 16 +
 .../db_repo_module/models/telephony_config.py | 62 +
 .../db_repo_module/models/tts_config.py | 57 +
 .../db_repo_module/models/user.py | 51 +
 .../db_repo_module/models/user_role.py | 16 +
 .../db_repo_module/models/voice_agent.py | 62 +
 .../db_repo_module/models/workflow.py | 39 +
 .../models/workflow_pipeline.py | 52 +
 .../db_repo_module/models/workflow_runs.py | 46 +
 .../repositories/sql_alchemy_repository.py | 226 +
 .../modules/db_repo_module/pyproject.toml | 32 +
 .../controllers/image_controller.py | 90 +
 .../gold_module/controllers/router.py | 5 +
 .../gold_module/gold_module/gold_container.py | 36 +
 .../gold_module/models/gold_image_request.py | 90 +
 .../services/cloud_image_service.py | 126 +
 .../gold_module/services/image_service.py | 81 +
 .../server/modules/gold_module/pyproject.toml | 29 +
 .../image_search_module/algorithms/base.py | 111 +
 .../algorithms/sift_matcher.py | 278 +
 .../controllers/image_search_controller.py | 159 +
 .../image_search_container.py | 96 +
 .../image_search_module/models/ikb_models.py | 126 +
 .../models/search_request.py | 83 +
 .../repositories/ikb_repository.py | 76 +
 .../repositories/sift_features_repository.py | 52 +
 .../services/algorithm_factory.py | 36 +
 .../services/algorithm_service.py | 37 +
 .../services/ikb_service.py | 191 +
 .../services/image_matching_service.py | 104 +
 .../services/reference_image_service.py | 244 +
 .../image_search_module/pyproject.toml | 46 +
 .../image_search_module/tests/conftest.py | 35 +
 .../image_search_module/tests/db_setup.py | 123 +
 .../tests/test_crud_endpoints.py | 627 ++
 .../tests/test_ikb_create_upload.py | 334 +
 .../tests/test_image_controller.py | 203 +
 .../tests/test_images/local_search.sh | 150 +
 .../tests/test_images/staging_search.sh | 208 +
 .../inference_module/__init__.py | 0
 .../inference_module/controllers/__init__.py | 0
 .../controllers/inference_controller.py | 244 +
 .../inference_module/inference_container.py | 20 +
 .../modules/inference_module/pyproject.toml | 40 +
 .../inference_module/tests/conftest.py | 179 +
 .../tests/test_inference_controller.py | 214 +
 .../controllers/dynamic_query_controller.py | 99 +
 .../controllers/pdo_controller.py | 223 +
 .../insights_module/controllers/router.py | 7 +
 .../insights_module/db/bigquery_connector.py | 85 +
 .../insights_module/db/redshift_connector.py | 151 +
 .../insights_module/insights_container.py | 84 +
 .../insights_module/models/dymanic_query.py | 30 +
 .../insights_module/models/insights_signal.py | 122 +
 .../models/insights_signal_query.py | 96 +
 .../models/lead_signal_query.py | 17 +
 .../models/leads_aggreegate.py | 12 +
 .../repository/pvo_repository.py | 231 +
 .../service/dynamic_query_service.py | 168 +
 .../service/insights_service.py | 353 +
 .../insights_module/service/pdo_service.py | 168 +
 .../service/usage_metric_service.py | 12 +
 .../insights_module/utils/helper.py | 19 +
 .../modules/insights_module/pyproject.toml | 49 +
 .../modules/insights_module/tests/conftest.py | 239 +
 .../tests/test_pvo_controller.py | 275 +
 .../knowledge_base_module/__init__.py | 5 +
 .../controllers/__init__.py | 0
 .../controllers/knowledge_base_controller.py | 211 +
 .../knowledge_base_document_controller.py | 341 +
 .../controllers/rag_retreival_controller.py | 548 ++
 .../embeddings/__init__.py | 0
 .../knowledge_base_module/embeddings/embed.py | 53 +
 .../knowledge_base_module/embeddings/llm.py | 55 +
 .../knowledge_base_container.py | 85 +
 .../knowledge_base_module/models/__init__.py | 0
 .../models/knowledge_base_schema.py | 14 +
 .../knowledge_base_module/queries/__init__.py | 7 +
 .../queries/generate_query.py | 291 +
 .../services/__init__.py | 0
 .../services/image_rag_retrieve.py | 112 +
 .../services/kb_rag_retrieve.py | 182 +
 .../services/kb_rag_storage.py | 300 +
 .../knowledge_base_module/pyproject.toml | 60 +
 .../knowledge_base_module/tests/conftest.py | 263 +
 .../tests/test_knowledge_base_controller.py | 358 +
 ...test_knowledge_base_document_controller.py | 402 ++
 .../tests/test_rag_retrieval_controller.py | 646 ++
 .../llm_inference_config_module/container.py | 37 +
 .../controllers/inference_proxy_controller.py | 55 +
 .../llm_inference_config_controller.py | 311 +
 .../models/schemas.py | 69 +
 .../services/inference_proxy_service.py | 401 ++
 .../services/llm_inference_config_service.py | 232 +
 .../utils/cache_invalidation.py | 78 +
 .../utils/cache_utils.py | 13 +
 .../pyproject.toml | 42 +
 .../tests/test_inference_proxy.py | 255 +
 .../server/modules/plugins_module/README.md | 0
 .../plugins_module/controllers/__init__.py | 4 +
 .../controllers/authenticator_controller.py | 372 +
 .../controllers/datasource_controller.py | 744 ++
 .../message_processor_controller.py | 326 +
 .../plugins_module/plugins_container.py | 63 +
 .../plugins_module/services/__init__.py | 0
 .../services/authenticator_services.py | 366 +
 .../services/datasource_services.py | 84 +
 .../services/dynamic_query_service.py | 150 +
 .../services/message_processor_service.py | 184 +
 .../plugins_module/utils/__init__.py | 0
 .../utils/authenticator_helper.py | 135 +
 .../plugins_module/utils/helper.py | 111 +
 .../modules/plugins_module/pyproject.toml | 46 +
 .../product_anaysis_controllers.py | 101 +
 .../models/product_analysis.py | 37 +
 .../product_analysis_container.py | 7 +
 .../product_analysis_service.py | 39 +
 .../product_analysis_module/pyproject.toml | 30 +
 .../product_analysis_module/tests/conftest.py | 219 +
 .../test_product_analysis_controllers.py | 371 +
 .../modules/tools_module/pyproject.toml | 40 +
 .../tools_module/available_tools.json | 161 +
 .../controllers/tools_controller.py | 185 +
 .../datasources/bigquery_tools.py | 317 +
 .../tools_module/datasources/provider.py | 55 +
 .../tools_module/email/email_tool.py | 21 +
 .../interfaces/tool_details_provider.py | 35 +
 .../knowlegebase/knowledge_base_tools.py | 45 +
 .../tools_module/knowlegebase/provider.py | 76 +
 .../tools_module/models/tool_schemas.py | 114 +
 .../registry/function_node_adapter.py | 222 +
 .../registry/function_node_registry.py | 69 +
 .../registry/function_registry.py | 46 +
 .../registries/datasource_registry.py | 31 +
 .../registry/registries/email_registry.py | 3 +
 .../registries/knowledge_base_registry.py | 5 +
 .../registries/util_function_registry.py | 7 +
 .../tools_module/registry/tool_loader.py | 97 +
 .../services/default_tool_provider.py | 24 +
 .../tools_module/services/tool_service.py | 124 +
 .../tools_module/tools_container.py | 46 +
 .../tools_module/utils/api_service_fn.py | 40 +
 .../utils/message_processor_fn.py | 43 +
 .../user_management_module/pyproject.toml | 48 +
 .../user_management_module/tests/conftest.py | 326 +
 .../tests/test_access_controller.py | 179 +
 .../tests/test_auth_controller.py | 831 +++
 .../tests/test_user_controller.py | 1321 ++++
 .../authorization/require_auth.py | 563 ++
 .../user_management_module/constants/auth.py | 11 +
 .../controllers/access_controller.py | 393 +
 .../controllers/auth_controller.py | 293 +
 .../controllers/auth_plugin_controller.py | 622 ++
 .../controllers/user_controller.py | 602 ++
 .../dependencies/authorization.py | 38 +
 .../models/oauth_provider.py | 10 +
 .../user_management_module/models/resource.py | 68 +
 .../models/user_schema.py | 89 +
 .../user_management_module/router.py | 15 +
 .../services/account_inactivity_service.py | 60 +
 .../services/account_lockout_service.py | 204 +
 .../services/email_service.py | 198 +
 .../services/user_service.py | 218 +
 .../user_management_module/user_container.py | 99 +
 .../utils/password_utils.py | 13 +
 .../utils/user_utils.py | 80 +
 .../voice_agents_module/pyproject.toml | 34 +
 .../controllers/stt_config_controller.py | 250 +
 .../telephony_config_controller.py | 283 +
 .../controllers/tts_config_controller.py | 253 +
 .../controllers/voice_agent_controller.py | 415 ++
 .../voice_agents_module/models/stt_schemas.py | 60 +
 .../models/telephony_schemas.py | 124 +
 .../voice_agents_module/models/tts_schemas.py | 63 +
 .../models/voice_agent_schemas.py | 74 +
 .../services/stt_config_service.py | 222 +
 .../services/telephony_config_service.py | 253 +
 .../services/tts_config_service.py | 225 +
 .../services/tts_generator_service.py | 228 +
 .../services/twilio_service.py | 67 +
 .../services/voice_agent_service.py | 490 ++
 .../utils/cache_invalidation.py | 78 +
 .../voice_agents_module/utils/cache_utils.py | 46 +
 .../utils/storage_utils.py | 18 +
 .../utils/telephony_utils.py | 86 +
 .../voice_agents_container.py | 95 +
 wavefront/server/packages/flo_cloud/README.md | 0
 .../flo_cloud/flo_cloud/_types/__init__.py | 19 +
 .../flo_cloud/_types/cloud_storage.py | 105 +
 .../flo_cloud/flo_cloud/_types/kms.py | 23 +
 .../flo_cloud/_types/message_queue.py | 37 +
 .../packages/flo_cloud/flo_cloud/aws/kms.py | 67 +
 .../flo_cloud/flo_cloud/aws/redshift.py | 652 ++
 .../packages/flo_cloud/flo_cloud/aws/s3.py | 203 +
 .../packages/flo_cloud/flo_cloud/aws/sqs.py | 80 +
 .../flo_cloud/flo_cloud/azure/__init__.py | 0
 .../flo_cloud/flo_cloud/cloud_storage.py | 189 +
 .../flo_cloud/flo_cloud/exceptions.py | 20 +
 .../flo_cloud/flo_cloud/gcp/bigquery.py | 1684 +++++
 .../packages/flo_cloud/flo_cloud/gcp/gcs.py | 219 +
 .../packages/flo_cloud/flo_cloud/gcp/kms.py | 100 +
 .../flo_cloud/flo_cloud/gcp/pubsub.py | 83 +
 .../packages/flo_cloud/flo_cloud/kms.py | 36 +
 .../flo_cloud/flo_cloud/message_queue.py | 30 +
 .../server/packages/flo_cloud/pyproject.toml | 32 +
 wavefront/server/packages/flo_utils/README.md | 0
 .../packages/flo_utils/flo_utils/__init__.py | 0
 .../flo_utils/flo_utils/constants/workflow.py | 5 +
 .../legacy_db_repository/legacy_base_db.py | 57 +
 .../legacy_db_repository/legacy_bigquery.py | 216 +
 .../legacy_insights_repository.py | 54 +
 .../legacy_db_repository/legacy_redshift.py | 241 +
 .../legacy_schema_manager.py | 211 +
 .../flo_utils/flo_utils/llm/__init__.py | 0
 .../packages/flo_utils/flo_utils/main.py | 6 +
 .../flo_utils/streaming/event_message.py | 8 +
 .../flo_utils/streaming/message_processor.py | 27 +
 .../flo_utils/streaming/stream_listner.py | 158 +
 .../flo_utils/flo_utils/utils/helper.py | 11 +
 .../packages/flo_utils/flo_utils/utils/log.py | 25 +
 .../server/packages/flo_utils/pyproject.toml | 35 +
 .../server/plugins/authenticator/README.md | 222 +
 .../authenticator/authenticator/__init__.py | 27 +
 .../authenticator/email_password/__init__.py | 4 +
 .../email_password/authenticator.py | 218 +
 .../authenticator/email_password/config.py | 21 +
 .../authenticator/authenticator/factory.py | 217 +
 .../authenticator/google_oauth/__init__.py | 4 +
 .../google_oauth/authenticator.py | 317 +
 .../authenticator/google_oauth/config.py | 15 +
 .../authenticator/authenticator/helper.py | 144 +
 .../authenticator/microsoft_oauth/__init__.py | 4 +
 .../microsoft_oauth/authenticator.py | 311 +
 .../authenticator/microsoft_oauth/config.py | 17 +
 .../authenticator/authenticator/types.py | 182 +
 .../plugins/authenticator/pyproject.toml | 24 +
 wavefront/server/plugins/datasource/README.md | 0
 .../plugins/datasource/datasource/__init__.py | 122 +
 .../datasource/bigquery/__init__.py | 152 +
 .../datasource/datasource/bigquery/config.py | 11 +
 .../plugins/datasource/datasource/helper.py | 5 +
 .../datasource/datasource/odata_parser.py | 823 +++
 .../datasource/redshift/__init__.py | 71 +
 .../datasource/datasource/redshift/config.py | 10 +
 .../plugins/datasource/datasource/types.py | 101 +
 .../server/plugins/datasource/pyproject.toml | 27 +
 .../datasource/tests/test_join_operations.py | 613 ++
 .../datasource/tests/test_odata_parser_lex.py | 256 +
 wavefront/server/pyproject.toml | 35 +
 wavefront/server/uv.lock | 6404 +++++++++++++++++
 535 files changed, 69142 insertions(+)
 create mode 100644 wavefront/server/apps/call_processing/call_processing/__init__.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/cache/cache_manager.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/cache/cache_utils.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/config.ini
 create mode 100644 wavefront/server/apps/call_processing/call_processing/constants/__init__.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/constants/api_endpoints.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/constants/auth.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/controllers/cache_controller.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/controllers/webhook_controller.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/di/application_container.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/log/logger.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/server.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/services/floware_http_client.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/services/llm_service.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/services/pipecat_service.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/services/stt_service.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/services/tts_service.py
 create mode 100644 wavefront/server/apps/call_processing/call_processing/services/voice_agent_cache_service.py
 create mode 100644 wavefront/server/apps/call_processing/pyproject.toml
 create mode 100644 wavefront/server/apps/floconsole/floconsole/__init__.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/authorization/require_auth.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/config.ini
 create mode 100644 wavefront/server/apps/floconsole/floconsole/constants/app.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/constants/auth.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/controllers/__init__.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/controllers/app_controller.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/controllers/auth_controller.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/controllers/floware_proxy_controller.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/controllers/user_controller.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/__init__.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/alembic.ini
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/alembic/env.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/alembic/script.py.mako
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_18_1543-73bcd253dd62_create_user_table.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_18_1543-e3a2fa91cda2_create_session_table.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_18_1554-521ae4960bcf_add_seed_user.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_21_1251-ac10dc573599_create_app_table.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_10_14_1514-e9a4691e0732_update_apps_table.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_11_24_1228-480783ba0ace_adding_type_column_to_app_table.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/base.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/connection.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/models/__init__.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/models/app.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/models/session.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/models/user.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/repositories/__init__.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/db/repositories/sql_alchemy_repository.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/di/application_container.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/server.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/services/app_service.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/services/floware_proxy_service.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/services/token_service.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/utils/password_utils.py
 create mode 100644 wavefront/server/apps/floconsole/floconsole/utils/user_utils.py
 create mode 100644 wavefront/server/apps/floconsole/pyproject.toml
 create mode 100644 wavefront/server/apps/floware/floware/__init__.py
 create mode 100644 wavefront/server/apps/floware/floware/channels.py
 create mode 100644 wavefront/server/apps/floware/floware/config.ini
 create mode 100644 wavefront/server/apps/floware/floware/controllers/__init__.py
 create mode 100644 wavefront/server/apps/floware/floware/controllers/config_controller.py
 create mode 100644 wavefront/server/apps/floware/floware/controllers/notification_controller.py
 create mode 100644 wavefront/server/apps/floware/floware/decorators/with_lock.py
 create mode 100644 wavefront/server/apps/floware/floware/di/application_container.py
 create mode 100644 wavefront/server/apps/floware/floware/middleware/__init__.py
 create mode 100644 wavefront/server/apps/floware/floware/middleware/security_headers.py
 create mode 100644 wavefront/server/apps/floware/floware/server.py
 create mode 100644 wavefront/server/apps/floware/floware/services/config_service.py
 create mode 100644 wavefront/server/apps/floware/floware/services/notification_service.py
 create mode 100644 wavefront/server/apps/floware/floware/utils/network_utils.py
 create mode 100644 wavefront/server/apps/floware/floware/utils/yaml.py
 create mode 100644 wavefront/server/apps/floware/pyproject.toml
 create mode 100644 wavefront/server/apps/floware/tests/conftest.py
 create mode 100644 wavefront/server/apps/floware/tests/test_config_controller.py
 create mode 100644 wavefront/server/apps/inference_app/inference_app/__init__.py
 create mode 100644 wavefront/server/apps/inference_app/inference_app/config.ini
 create mode 100644 wavefront/server/apps/inference_app/inference_app/controllers/__init__.py
 create mode 100644 wavefront/server/apps/inference_app/inference_app/controllers/inference_controller.py
 create mode 100644 wavefront/server/apps/inference_app/inference_app/inference_app_container.py
 create mode 100644 wavefront/server/apps/inference_app/inference_app/server.py
 create mode 100644 wavefront/server/apps/inference_app/inference_app/service/__init__.py
 create mode 100644 wavefront/server/apps/inference_app/inference_app/service/image_analyser.py
 create mode 100644 wavefront/server/apps/inference_app/inference_app/service/image_embedding.py
 create mode 100644 wavefront/server/apps/inference_app/inference_app/service/model_inference.py
 create mode 100644 wavefront/server/apps/inference_app/inference_app/service/model_repository.py
 create mode 100644 wavefront/server/apps/inference_app/inference_app/utils/__init__.py
 create mode 100644 wavefront/server/apps/inference_app/inference_app/utils/image_utils.py
 create mode 100644 wavefront/server/apps/inference_app/pyproject.toml
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/pyproject.toml
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/__init__.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/constants/__init__.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/constants/auth.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/embeddings/__init__.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/embeddings/embed.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/embeddings/image_embed.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/env.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/main.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/__init__.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/doc_content.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/knowledge_base_embeddings.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/rag_message.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/processors/__init__.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/processors/file_processor.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/processors/kb_storage_processor.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/service/__init__.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/service/kb_rag_storage.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/stream/__init__.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/stream/queue_message.py
 create mode 100644 wavefront/server/background_jobs/rag_ingestion/rag_ingestion/stream/rag_streamer.py
 create mode 100644 wavefront/server/background_jobs/workflow_job/pyproject.toml
 create mode 100644 wavefront/server/background_jobs/workflow_job/workflow_job/__init__.py
 create mode 100644 wavefront/server/background_jobs/workflow_job/workflow_job/config.ini
 create mode 100644 wavefront/server/background_jobs/workflow_job/workflow_job/constants/__init__.py
 create mode 100644 wavefront/server/background_jobs/workflow_job/workflow_job/constants/auth.py
 create mode 100644 wavefront/server/background_jobs/workflow_job/workflow_job/main.py
 create mode 100644 wavefront/server/background_jobs/workflow_job/workflow_job/models.py
 create mode 100644 wavefront/server/background_jobs/workflow_job/workflow_job/workflow_listener.py
 create mode 100644 wavefront/server/background_jobs/workflow_job/workflow_job/workflow_processor.py
 create mode 100644 wavefront/server/docker-compose.yml
 create mode 100644 wavefront/server/modules/agents_module/agents_module/agents_container.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/controllers/agent_controller.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/controllers/namespace_controller.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/controllers/workflow_controller.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/controllers/workflow_pipeline_controller.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/controllers/workflow_runs.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/models/agent_schemas.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/models/workflow_schemas.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/services/agent_crud_service.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/services/agent_inference_service.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/services/namespace_service.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/services/workflow_crud_service.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/services/workflow_events.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/services/workflow_inference_service.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/utils/agent_utils.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/utils/auth_utils.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/utils/cache_utils.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/utils/input_processing_utils.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/utils/validation_utils.py
 create mode 100644 wavefront/server/modules/agents_module/agents_module/utils/workflow_utils.py
 create mode 100644 wavefront/server/modules/agents_module/pyproject.toml
 create mode 100644 wavefront/server/modules/agents_module/tests/test_input_processing_utils.py
 create mode 100644 wavefront/server/modules/api_services_module/README.md
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/__init__.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/api_services_container.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/auth/__init__.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/auth/handlers.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/auth/manager.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/config/__init__.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/config/parser.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/config/registry.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/core/__init__.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/core/manager.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/core/proxy.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/core/router.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/env.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/execution/execute.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/models/__init__.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/models/pipeline.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/models/service.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/pipeline/__init__.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/pipeline/builder.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/pipeline/stages.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/utils/api_change_processor.py
 create mode 100644 wavefront/server/modules/api_services_module/api_services_module/utils/api_change_publisher.py
 create mode 100644 wavefront/server/modules/api_services_module/pyproject.toml
 create mode 100644 wavefront/server/modules/api_services_module/tests/__init__.py
 create mode 100644 wavefront/server/modules/api_services_module/tests/conftest.py
 create mode 100644 wavefront/server/modules/api_services_module/tests/test_example.py
 create mode 100644 wavefront/server/modules/api_services_module/tests/test_integration_with_mock_backend.py
 create mode 100644 wavefront/server/modules/api_services_module/tests/test_service_deletion.py
 create mode 100644 wavefront/server/modules/api_services_module/tests/test_simple_integration.py
 create mode 100644 wavefront/server/modules/auth_module/auth_module/auth_container.py
 create mode 100644 wavefront/server/modules/auth_module/auth_module/controllers/hmac_controller.py
 create mode 100644 wavefront/server/modules/auth_module/auth_module/controllers/outlook_controller.py
 create mode 100644 wavefront/server/modules/auth_module/auth_module/controllers/superset_controller.py
 create mode 100644 wavefront/server/modules/auth_module/auth_module/services/client_token_service.py
 create mode 100644 wavefront/server/modules/auth_module/auth_module/services/outlook_service.py
 create mode 100644 wavefront/server/modules/auth_module/auth_module/services/superset_service.py
 create mode 100644 wavefront/server/modules/auth_module/auth_module/services/token_service.py
 create mode 100644 wavefront/server/modules/auth_module/pyproject.toml
 create mode 100644 wavefront/server/modules/auth_module/tests/conftest.py
 create mode 100644 wavefront/server/modules/auth_module/tests/data/idp_metadata.xml
 create mode 100644 wavefront/server/modules/auth_module/tests/data/key_gen.sh
 create mode 100644 wavefront/server/modules/auth_module/tests/data/test_private_key.pem
 create mode 100644 wavefront/server/modules/auth_module/tests/data/test_public_key.pem
 create mode 100644 wavefront/server/modules/auth_module/tests/fixtures/__init__.py
 create mode 100644 wavefront/server/modules/auth_module/tests/fixtures/keys/private_key.pem
 create mode 100644 wavefront/server/modules/auth_module/tests/fixtures/keys/public_key.pem
 create mode 100644 wavefront/server/modules/auth_module/tests/fixtures/test_keys.py
 create mode 100644 wavefront/server/modules/auth_module/tests/test_superset_controller.py
 create mode 100644 wavefront/server/modules/common_module/common_module/common_cache.py
 create mode 100644 wavefront/server/modules/common_module/common_module/common_container.py
 create mode 100644 wavefront/server/modules/common_module/common_module/feature/feature_flag.py
 create mode 100644 wavefront/server/modules/common_module/common_module/log/logger.py
 create mode 100644 wavefront/server/modules/common_module/common_module/middleware/__init__.py
 create mode 100644 wavefront/server/modules/common_module/common_module/middleware/request_id_middleware.py
 create mode 100644 wavefront/server/modules/common_module/common_module/models/response.py
 create mode 100644 wavefront/server/modules/common_module/common_module/prometheus/prometheus_middleware.py
 create mode 100644 wavefront/server/modules/common_module/common_module/response_formatter.py
 create mode 100644 wavefront/server/modules/common_module/common_module/scheduler.py
 create mode 100644 wavefront/server/modules/common_module/common_module/security/__init__.py
 create mode 100644 wavefront/server/modules/common_module/common_module/security/bearer_auth.py
 create mode 100644 wavefront/server/modules/common_module/common_module/utils/odata_parser.py
 create mode 100644 wavefront/server/modules/common_module/common_module/utils/serializer.py
 create mode 100644 wavefront/server/modules/common_module/pyproject.toml
 create mode 100644 wavefront/server/modules/common_module/tests/conftest.py
 create mode 100644 wavefront/server/modules/common_module/tests/test_odata_parser.py
 create mode 100644 wavefront/server/modules/common_module/tests/test_request_id_middleware.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic.ini
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/env.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/script.py.mako
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_04_1326-f6b7ce8e5b03_create_a_baseline_migrations.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_04_1327-17c4ba1a32fe_initializing_the_role_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_09_1030-756caddfb44b_truncating_role_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_17_1412-01a4c5202566_hash_existing_passwords.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_10_1133-c7800bd1d9c3_for_actionable_insights.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_18_1751-76ba9543af92_create_signal_name_in_actionable_alerts.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_22_1236-f9c4c1c48d46_remove_all_wrongly_generated_alerts.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_28_1602-78655faf6488_adding_notification.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_03_25_1855-9b10292a95eb_rbac_tables.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_03_31_1715-36703628c7a6_adding_cacade_delete.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_22_1448-ff32e2dd3106_created_actionable_insight_query_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_23_1839-96b784074d1c_actionable_alerts_and_query_migrations.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_24_1730-a0dfba41ef64_updated_knowledge_base_tables.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_29_1345-053823285206_create_leads_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_02_1239-d77dca43b31d_renaming_column_in_leads_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_02_1300-497a13558d60_created_the_kb_inference_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_20_1427-80a6b1232d5e_updated_the_knowledge_base_embeddings.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_24_1725-ba1f66ca0228_user_session.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_25_2103-0da695688814_cascade_rbac.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_06_11_1937-b365be32ca72_updated_the_knowledge_base_tables_for_.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_07_09_1936-827b9d399023_add_auth_secrets_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_07_10_1418-0db19a0af2af_added_datasource_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_07_14_1400-a1b2c3d4e5f7_create_authenticators_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_11_1516-68ffaa4a3665_create_product_analysis_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_13_1309-1ef7d577ea53_add_account_lockout_fields_to_user_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_13_1328-d5caffc321f2_create_config_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_14_1455-1aaf2b1e6d56_add_last_login_at_to_user_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_24_0717-f1f6e5c42780_created_model_inference_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_09_01_1703-23db0be3a87a_create_llm_inference_config_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_09_03_1230-bb3907e50d30_create_config_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_09_16_1332-bf901c107c8d_create_image_search_tables.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_10_21_1535-d54e5612306e_workflow_tables.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_10_24_0713-6742f38ca303_add_new_columns_in_knowledge_base_tables.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_10_29_1346-22ec0134dcf8_create_voice_agents_tables.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_03_1437-9bd8b7884ab0_update_the_metadata_column_in_knowledge_.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_04_1544-584f653169fd_add_parameters_to_llm_inference_config.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_13_1654-ed9fca299c85_create_agents_and_namespaces_tables.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_19_1515-af9dfcda24fb_add_message_processor_table.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_23_1015-a9a5d624020c_api_service.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_26_0656-ca83b60258d6_added_config_id_column_in_inference_.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_12_01_1619-10e09e25efa0_drop_actionable_alert_insight_leads_.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/cache/cache_manager.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/database/base.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/database/connection.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/db_repo_container.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/agent.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/api_services.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/auth_secrets.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/authenticator.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/config.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/datasource.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/documents.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/dynamic_query_yaml.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/email.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/ikb_models.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/image_search_models.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/kb_inferences.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/knowledge_base_documents.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/knowledge_base_embeddings.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/knowledge_bases.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/llm_inference_config.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/message_processors.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/model_schema.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/namespace.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/notification_users.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/notifications.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/oauth_credential.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/product_analytics.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/resource.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/role.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/role_resource.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/saml_config.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/session.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/stt_config.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/task.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/team.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/telephony_config.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/tts_config.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/user.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/user_role.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/voice_agent.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/workflow.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/workflow_pipeline.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/models/workflow_runs.py
 create mode 100644 wavefront/server/modules/db_repo_module/db_repo_module/repositories/sql_alchemy_repository.py
 create mode 100644 wavefront/server/modules/db_repo_module/pyproject.toml
 create mode 100644 wavefront/server/modules/gold_module/gold_module/controllers/image_controller.py
 create mode 100644 wavefront/server/modules/gold_module/gold_module/controllers/router.py
 create mode 100644 wavefront/server/modules/gold_module/gold_module/gold_container.py
 create mode 100644 wavefront/server/modules/gold_module/gold_module/models/gold_image_request.py
 create mode 100644 wavefront/server/modules/gold_module/gold_module/services/cloud_image_service.py
 create mode 100644 wavefront/server/modules/gold_module/gold_module/services/image_service.py
 create mode 100644 wavefront/server/modules/gold_module/pyproject.toml
 create mode 100644 wavefront/server/modules/image_search_module/image_search_module/algorithms/base.py
 create mode 100644 wavefront/server/modules/image_search_module/image_search_module/algorithms/sift_matcher.py
 create mode 100644 wavefront/server/modules/image_search_module/image_search_module/controllers/image_search_controller.py
 create mode 100644 wavefront/server/modules/image_search_module/image_search_module/image_search_container.py
 create mode 100644 wavefront/server/modules/image_search_module/image_search_module/models/ikb_models.py
 create mode 100644 wavefront/server/modules/image_search_module/image_search_module/models/search_request.py
 create mode 100644 wavefront/server/modules/image_search_module/image_search_module/repositories/ikb_repository.py
 create mode 100644 wavefront/server/modules/image_search_module/image_search_module/repositories/sift_features_repository.py
 create mode 100644 wavefront/server/modules/image_search_module/image_search_module/services/algorithm_factory.py
 create mode 100644 wavefront/server/modules/image_search_module/image_search_module/services/algorithm_service.py
 create mode 100644 wavefront/server/modules/image_search_module/image_search_module/services/ikb_service.py
 create mode 100644 wavefront/server/modules/image_search_module/image_search_module/services/image_matching_service.py
 create mode 100644 wavefront/server/modules/image_search_module/image_search_module/services/reference_image_service.py
 create mode 100644 wavefront/server/modules/image_search_module/pyproject.toml
 create mode 100644 wavefront/server/modules/image_search_module/tests/conftest.py
 create mode 100644 wavefront/server/modules/image_search_module/tests/db_setup.py
 create mode 100644 wavefront/server/modules/image_search_module/tests/test_crud_endpoints.py
 create mode 100644 wavefront/server/modules/image_search_module/tests/test_ikb_create_upload.py
 create mode 100644 wavefront/server/modules/image_search_module/tests/test_image_controller.py
 create mode 100755 wavefront/server/modules/image_search_module/tests/test_images/local_search.sh
 create mode 100755 wavefront/server/modules/image_search_module/tests/test_images/staging_search.sh
 create mode 100644 wavefront/server/modules/inference_module/inference_module/__init__.py
 create mode 100644 wavefront/server/modules/inference_module/inference_module/controllers/__init__.py
 create mode 100644 wavefront/server/modules/inference_module/inference_module/controllers/inference_controller.py
 create mode 100644 wavefront/server/modules/inference_module/inference_module/inference_container.py
 create mode 100644 wavefront/server/modules/inference_module/pyproject.toml
 create mode 100644 wavefront/server/modules/inference_module/tests/conftest.py
 create mode 100644 wavefront/server/modules/inference_module/tests/test_inference_controller.py
 create mode 100644 wavefront/server/modules/insights_module/insights_module/controllers/dynamic_query_controller.py
 create mode 100644 wavefront/server/modules/insights_module/insights_module/controllers/pdo_controller.py
 create mode 100644 wavefront/server/modules/insights_module/insights_module/controllers/router.py
 create mode 100644 wavefront/server/modules/insights_module/insights_module/db/bigquery_connector.py
 create mode 100644 wavefront/server/modules/insights_module/insights_module/db/redshift_connector.py
 create mode 100644 wavefront/server/modules/insights_module/insights_module/insights_container.py
 create mode 100644 wavefront/server/modules/insights_module/insights_module/models/dymanic_query.py
wavefront/server/modules/insights_module/insights_module/models/insights_signal.py create mode 100644 wavefront/server/modules/insights_module/insights_module/models/insights_signal_query.py create mode 100644 wavefront/server/modules/insights_module/insights_module/models/lead_signal_query.py create mode 100644 wavefront/server/modules/insights_module/insights_module/models/leads_aggreegate.py create mode 100644 wavefront/server/modules/insights_module/insights_module/repository/pvo_repository.py create mode 100644 wavefront/server/modules/insights_module/insights_module/service/dynamic_query_service.py create mode 100644 wavefront/server/modules/insights_module/insights_module/service/insights_service.py create mode 100644 wavefront/server/modules/insights_module/insights_module/service/pdo_service.py create mode 100644 wavefront/server/modules/insights_module/insights_module/service/usage_metric_service.py create mode 100644 wavefront/server/modules/insights_module/insights_module/utils/helper.py create mode 100644 wavefront/server/modules/insights_module/pyproject.toml create mode 100644 wavefront/server/modules/insights_module/tests/conftest.py create mode 100644 wavefront/server/modules/insights_module/tests/test_pvo_controller.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/__init__.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/__init__.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/knowledge_base_controller.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/knowledge_base_document_controller.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/rag_retreival_controller.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/__init__.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/embed.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/llm.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/knowledge_base_container.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/models/__init__.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/models/knowledge_base_schema.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/queries/__init__.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/queries/generate_query.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/__init__.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/image_rag_retrieve.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/kb_rag_retrieve.py create mode 100644 wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/kb_rag_storage.py create mode 100644 wavefront/server/modules/knowledge_base_module/pyproject.toml create mode 100644 wavefront/server/modules/knowledge_base_module/tests/conftest.py create mode 100644 wavefront/server/modules/knowledge_base_module/tests/test_knowledge_base_controller.py create mode 100644 
wavefront/server/modules/knowledge_base_module/tests/test_knowledge_base_document_controller.py create mode 100644 wavefront/server/modules/knowledge_base_module/tests/test_rag_retrieval_controller.py create mode 100644 wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/container.py create mode 100644 wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/controllers/inference_proxy_controller.py create mode 100644 wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/controllers/llm_inference_config_controller.py create mode 100644 wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/models/schemas.py create mode 100644 wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/services/inference_proxy_service.py create mode 100644 wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/services/llm_inference_config_service.py create mode 100644 wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/utils/cache_invalidation.py create mode 100644 wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/utils/cache_utils.py create mode 100644 wavefront/server/modules/llm_inference_config_module/pyproject.toml create mode 100644 wavefront/server/modules/llm_inference_config_module/tests/test_inference_proxy.py create mode 100644 wavefront/server/modules/plugins_module/README.md create mode 100644 wavefront/server/modules/plugins_module/plugins_module/controllers/__init__.py create mode 100644 wavefront/server/modules/plugins_module/plugins_module/controllers/authenticator_controller.py create mode 100644 wavefront/server/modules/plugins_module/plugins_module/controllers/datasource_controller.py create mode 100644 wavefront/server/modules/plugins_module/plugins_module/controllers/message_processor_controller.py create mode 100644 wavefront/server/modules/plugins_module/plugins_module/plugins_container.py create mode 100644 wavefront/server/modules/plugins_module/plugins_module/services/__init__.py create mode 100644 wavefront/server/modules/plugins_module/plugins_module/services/authenticator_services.py create mode 100644 wavefront/server/modules/plugins_module/plugins_module/services/datasource_services.py create mode 100644 wavefront/server/modules/plugins_module/plugins_module/services/dynamic_query_service.py create mode 100644 wavefront/server/modules/plugins_module/plugins_module/services/message_processor_service.py create mode 100644 wavefront/server/modules/plugins_module/plugins_module/utils/__init__.py create mode 100644 wavefront/server/modules/plugins_module/plugins_module/utils/authenticator_helper.py create mode 100644 wavefront/server/modules/plugins_module/plugins_module/utils/helper.py create mode 100644 wavefront/server/modules/plugins_module/pyproject.toml create mode 100644 wavefront/server/modules/product_analysis_module/product_analysis_module/controllers/product_anaysis_controllers.py create mode 100644 wavefront/server/modules/product_analysis_module/product_analysis_module/models/product_analysis.py create mode 100644 wavefront/server/modules/product_analysis_module/product_analysis_module/product_analysis_container.py create mode 100644 wavefront/server/modules/product_analysis_module/product_analysis_module/product_analysis_service.py create mode 100644 wavefront/server/modules/product_analysis_module/pyproject.toml create mode 100644 
wavefront/server/modules/product_analysis_module/tests/conftest.py create mode 100644 wavefront/server/modules/product_analysis_module/tests/test_product_analysis_controllers.py create mode 100644 wavefront/server/modules/tools_module/pyproject.toml create mode 100644 wavefront/server/modules/tools_module/tools_module/available_tools.json create mode 100644 wavefront/server/modules/tools_module/tools_module/controllers/tools_controller.py create mode 100644 wavefront/server/modules/tools_module/tools_module/datasources/bigquery_tools.py create mode 100644 wavefront/server/modules/tools_module/tools_module/datasources/provider.py create mode 100644 wavefront/server/modules/tools_module/tools_module/email/email_tool.py create mode 100644 wavefront/server/modules/tools_module/tools_module/interfaces/tool_details_provider.py create mode 100644 wavefront/server/modules/tools_module/tools_module/knowlegebase/knowledge_base_tools.py create mode 100644 wavefront/server/modules/tools_module/tools_module/knowlegebase/provider.py create mode 100644 wavefront/server/modules/tools_module/tools_module/models/tool_schemas.py create mode 100644 wavefront/server/modules/tools_module/tools_module/registry/function_node_adapter.py create mode 100644 wavefront/server/modules/tools_module/tools_module/registry/function_node_registry.py create mode 100644 wavefront/server/modules/tools_module/tools_module/registry/function_registry.py create mode 100644 wavefront/server/modules/tools_module/tools_module/registry/registries/datasource_registry.py create mode 100644 wavefront/server/modules/tools_module/tools_module/registry/registries/email_registry.py create mode 100644 wavefront/server/modules/tools_module/tools_module/registry/registries/knowledge_base_registry.py create mode 100644 wavefront/server/modules/tools_module/tools_module/registry/registries/util_function_registry.py create mode 100644 wavefront/server/modules/tools_module/tools_module/registry/tool_loader.py create mode 100644 wavefront/server/modules/tools_module/tools_module/services/default_tool_provider.py create mode 100644 wavefront/server/modules/tools_module/tools_module/services/tool_service.py create mode 100644 wavefront/server/modules/tools_module/tools_module/tools_container.py create mode 100644 wavefront/server/modules/tools_module/tools_module/utils/api_service_fn.py create mode 100644 wavefront/server/modules/tools_module/tools_module/utils/message_processor_fn.py create mode 100644 wavefront/server/modules/user_management_module/pyproject.toml create mode 100644 wavefront/server/modules/user_management_module/tests/conftest.py create mode 100644 wavefront/server/modules/user_management_module/tests/test_access_controller.py create mode 100644 wavefront/server/modules/user_management_module/tests/test_auth_controller.py create mode 100644 wavefront/server/modules/user_management_module/tests/test_user_controller.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/authorization/require_auth.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/constants/auth.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/controllers/access_controller.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/controllers/auth_controller.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/controllers/auth_plugin_controller.py create mode 100644 
wavefront/server/modules/user_management_module/user_management_module/controllers/user_controller.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/dependencies/authorization.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/models/oauth_provider.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/models/resource.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/models/user_schema.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/router.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/services/account_inactivity_service.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/services/account_lockout_service.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/services/email_service.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/services/user_service.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/user_container.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/utils/password_utils.py create mode 100644 wavefront/server/modules/user_management_module/user_management_module/utils/user_utils.py create mode 100644 wavefront/server/modules/voice_agents_module/pyproject.toml create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/stt_config_controller.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/telephony_config_controller.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/tts_config_controller.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/voice_agent_controller.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/models/stt_schemas.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/models/telephony_schemas.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/models/tts_schemas.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/models/voice_agent_schemas.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/services/stt_config_service.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/services/telephony_config_service.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/services/tts_config_service.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/services/tts_generator_service.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/services/twilio_service.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/services/voice_agent_service.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/utils/cache_invalidation.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/utils/cache_utils.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/utils/storage_utils.py create mode 100644 
wavefront/server/modules/voice_agents_module/voice_agents_module/utils/telephony_utils.py create mode 100644 wavefront/server/modules/voice_agents_module/voice_agents_module/voice_agents_container.py create mode 100644 wavefront/server/packages/flo_cloud/README.md create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/_types/__init__.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/_types/cloud_storage.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/_types/kms.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/_types/message_queue.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/aws/kms.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/aws/redshift.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/aws/s3.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/aws/sqs.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/azure/__init__.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/cloud_storage.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/exceptions.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/gcp/bigquery.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/gcp/gcs.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/gcp/kms.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/gcp/pubsub.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/kms.py create mode 100644 wavefront/server/packages/flo_cloud/flo_cloud/message_queue.py create mode 100644 wavefront/server/packages/flo_cloud/pyproject.toml create mode 100644 wavefront/server/packages/flo_utils/README.md create mode 100644 wavefront/server/packages/flo_utils/flo_utils/__init__.py create mode 100644 wavefront/server/packages/flo_utils/flo_utils/constants/workflow.py create mode 100644 wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_base_db.py create mode 100644 wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_bigquery.py create mode 100644 wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_insights_repository.py create mode 100644 wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_redshift.py create mode 100644 wavefront/server/packages/flo_utils/flo_utils/legacy_schema_manager/legacy_schema_manager.py create mode 100644 wavefront/server/packages/flo_utils/flo_utils/llm/__init__.py create mode 100644 wavefront/server/packages/flo_utils/flo_utils/main.py create mode 100644 wavefront/server/packages/flo_utils/flo_utils/streaming/event_message.py create mode 100644 wavefront/server/packages/flo_utils/flo_utils/streaming/message_processor.py create mode 100644 wavefront/server/packages/flo_utils/flo_utils/streaming/stream_listner.py create mode 100644 wavefront/server/packages/flo_utils/flo_utils/utils/helper.py create mode 100644 wavefront/server/packages/flo_utils/flo_utils/utils/log.py create mode 100644 wavefront/server/packages/flo_utils/pyproject.toml create mode 100644 wavefront/server/plugins/authenticator/README.md create mode 100644 wavefront/server/plugins/authenticator/authenticator/__init__.py create mode 100644 wavefront/server/plugins/authenticator/authenticator/email_password/__init__.py create mode 100644 wavefront/server/plugins/authenticator/authenticator/email_password/authenticator.py create mode 100644 
diff --git a/wavefront/server/apps/call_processing/call_processing/__init__.py b/wavefront/server/apps/call_processing/call_processing/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wavefront/server/apps/call_processing/call_processing/cache/cache_manager.py b/wavefront/server/apps/call_processing/call_processing/cache/cache_manager.py
new file mode 100644
index 00000000..f30d3b89
--- /dev/null
+++ b/wavefront/server/apps/call_processing/call_processing/cache/cache_manager.py
@@ -0,0 +1,206 @@
+import json
+import os
+import time
+from typing import Any, Dict, Optional, Union
+
+from call_processing.log.logger import logger
+
+from redis import ConnectionError
+from redis import ConnectionPool
+from redis import Redis
+from redis import RedisError
+from redis import TimeoutError
+from tenacity import retry
+from tenacity import retry_if_exception_type
+from tenacity import stop_after_attempt
+from tenacity import wait_exponential
+
+
+class CacheManager:
+    def __init__(
+        self,
+        namespace: str = '',
+        max_retries: int = 3,
+        initial_backoff: int = 1,
+        max_backoff: int = 10,
+        connection_timeout: int = 60,
+        socket_timeout: int = 60,
+        socket_keepalive: bool = True,
+        pool_size: int = 10,
+    ):
+        self.namespace = namespace
+        self.max_retries = max_retries
+        self.initial_backoff = initial_backoff
+        self.max_backoff = max_backoff
+
+        self.pool = self._create_connection_pool(
+            connection_timeout=connection_timeout,
+            socket_timeout=socket_timeout,
+            socket_keepalive=socket_keepalive,
+            pool_size=pool_size,
+        )
+
+        self.redis = self._create_redis_connection()
+        logger.info('Connected to Redis with connection pooling enabled')
+
+    def _create_connection_pool(
+        self,
+        connection_timeout: int,
+        socket_timeout: int,
+        socket_keepalive: bool,
+        pool_size: int,
+    ) -> ConnectionPool:
+        try:
+            return ConnectionPool(
+                host=str(os.getenv('REDIS_HOST', 'localhost')),
+                port=int(os.getenv('REDIS_PORT', 6379)),
+                db=int(os.getenv('REDIS_DB', 0)),
+                max_connections=pool_size,
+                socket_timeout=socket_timeout,
+                socket_keepalive=socket_keepalive,
+                socket_connect_timeout=connection_timeout,
+                retry_on_timeout=True,
+                health_check_interval=30,
+                encoding='utf-8',
+                decode_responses=True,
+            )
+        except Exception as e:
+            logger.error(f'Failed to create connection pool: {e}')
+            raise
+
+    def _create_redis_connection(self) -> Redis:
+        logger.info('Creating Redis connection from pool...')
+        return Redis(connection_pool=self.pool)
+
+    def _checking_redis_connection(self):
+        try:
+            self.redis.ping()
+            return True
+        except (ConnectionError, TimeoutError) as e:
+            logger.warning(f'Redis connection lost: {e}. Attempting to reconnect...')
+            self.redis = self._create_redis_connection()
+            return False
+
+    @retry(
+        stop=stop_after_attempt(3),
+        wait=wait_exponential(multiplier=1, min=1, max=10),
+        retry=retry_if_exception_type((RedisError, ConnectionError, TimeoutError)),
+    )
+    def add(
+        self,
+        key: str,
+        value: Union[str, int, float, bytes],
+        expiry: int = 3600,
+        nx: bool = False,
+    ) -> bool:
+        try:
+            logger.info(f'Adding key: {key} to cache with expiry: {expiry} seconds')
+            return bool(
+                self.redis.set(f'{self.namespace}/{key}', value, ex=expiry, nx=nx)
+            )
+        except (RedisError, ConnectionError, TimeoutError) as e:
+            logger.error(f'Error adding key: {key} to cache: {e}')
+            raise
+
+    @retry(
+        stop=stop_after_attempt(3),
+        wait=wait_exponential(multiplier=1, min=1, max=10),
+        retry=retry_if_exception_type((RedisError, ConnectionError, TimeoutError)),
+    )
+    def get_str(self, key: str, default: Any = None) -> Optional[str]:
+        try:
+            value = self.redis.get(f'{self.namespace}/{key}')
+            return value if value is not None else default
+
+        except (RedisError, ConnectionError, TimeoutError) as e:
+            logger.error(f'Error getting key: {key} from cache: {e}')
+            raise
+
+    def get_int(self, key: str, default: int = 0) -> int:
+        value = self.get_str(key, default)
+        return int(value) if value is not None else default
+
+    def get_json(self, key: str) -> Optional[Dict]:
+        """
+        Get JSON value from cache
+
+        Args:
+            key: Cache key
+
+        Returns:
+            Parsed JSON dict or None if key not found
+        """
+        try:
+            value = self.get_str(key)
+            if value is not None:
+                return json.loads(value)
+            return None
+        except json.JSONDecodeError as e:
+            logger.error(f'Error decoding JSON for key: {key}: {e}')
+            return None
+
+    def set_json(self, key: str, value: Dict, expiry: int = 3600) -> bool:
+        """
+        Set JSON value in cache
+
+        Args:
+            key: Cache key
+            value: Dict to store as JSON
+            expiry: TTL in seconds (default 1 hour)
+
+        Returns:
+            True if successful
+        """
+        try:
+            json_str = json.dumps(value)
+            return self.add(key, json_str, expiry=expiry)
+        except (TypeError, ValueError) as e:
+            logger.error(f'Error encoding JSON for key: {key}: {e}')
+            return False
+
+    def remove(self, key: str) -> bool:
+        try:
+            return bool(self.redis.delete(f'{self.namespace}/{key}'))
+        except (RedisError, ConnectionError, TimeoutError) as e:
+            logger.error(f'Error removing key: {key} from cache: {e}')
+            raise
+
+    def invalidate_query(self, pattern: str) -> int:
+        """Remove all keys matching the given pattern"""
+        try:
+            # Get all keys matching the pattern
+            search_pattern = f'{self.namespace}/{pattern}'
+            keys = self.redis.keys(search_pattern)
+            if keys:
+                logger.info(
+                    f'Invalidating {len(keys)} cache keys matching pattern: {pattern}'
+                )
+                return self.redis.delete(*keys)
+            logger.info(f'No cache keys found matching pattern: {pattern}')
+            return 0
+        except (RedisError, ConnectionError, TimeoutError) as e:
+            logger.error(f'Error removing keys with pattern: {pattern} from cache: {e}')
+            raise
+
+    def close(self):
+        try:
+            self.pool.disconnect()
+            logger.info('Redis connection pool closed successfully')
+        except Exception as e:
+            logger.error(f'Error closing Redis connection pool: {e}')
+
+    def _retry_with_backoff(self, func: callable, *args, **kwargs) -> Any:
+        retries = 0
+        while retries < self.max_retries:
+            try:
+                return func(*args, **kwargs)
+            except (RedisError, ConnectionError, TimeoutError) as e:
+                retries += 1
+                if retries >= self.max_retries:
+                    logger.error(f'Max retries reached for {func.__name__}: {e}')
+                    raise
+                backoff = min(
+                    self.initial_backoff * (2 ** (retries - 1)), self.max_backoff
+                )
+                logger.warning(f'Retrying {func.__name__} in {backoff} seconds...')
+                time.sleep(backoff)
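
A brief usage sketch of the cache manager above. Illustrative only: the key name,
payload, and TTL are arbitrary, and it assumes REDIS_HOST/REDIS_PORT/REDIS_DB
point at a reachable Redis instance.

    cache = CacheManager(namespace='call_processing')
    cache.set_json('tts_config:123', {'provider': 'elevenlabs'}, expiry=600)
    cache.get_json('tts_config:123')  # -> {'provider': 'elevenlabs'}
    # Keys are stored namespaced, e.g. 'call_processing/tts_config:123'
    cache.remove('tts_config:123')
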
diff --git a/wavefront/server/apps/call_processing/call_processing/cache/cache_utils.py b/wavefront/server/apps/call_processing/call_processing/cache/cache_utils.py
new file mode 100644
index 00000000..aab6157f
--- /dev/null
+++ b/wavefront/server/apps/call_processing/call_processing/cache/cache_utils.py
@@ -0,0 +1,28 @@
+"""Cache key generation utilities for voice agent configurations"""
+
+from uuid import UUID
+
+
+def get_voice_agent_cache_key(agent_id: UUID) -> str:
+    """Generate cache key for a voice agent"""
+    return f'voice_agent:{agent_id}'
+
+
+def get_llm_config_cache_key(config_id: UUID) -> str:
+    """Generate cache key for an LLM config"""
+    return f'llm_inference_config:{config_id}'
+
+
+def get_tts_config_cache_key(config_id: UUID) -> str:
+    """Generate cache key for a TTS config"""
+    return f'tts_config:{config_id}'
+
+
+def get_stt_config_cache_key(config_id: UUID) -> str:
+    """Generate cache key for an STT config"""
+    return f'stt_config:{config_id}'
+
+
+def get_telephony_config_cache_key(config_id: UUID) -> str:
+    """Generate cache key for a telephony config"""
+    return f'telephony_config:{config_id}'
diff --git a/wavefront/server/apps/call_processing/call_processing/config.ini b/wavefront/server/apps/call_processing/call_processing/config.ini
new file mode 100644
index 00000000..1d50b96c
--- /dev/null
+++ b/wavefront/server/apps/call_processing/call_processing/config.ini
@@ -0,0 +1,6 @@
+[env_config]
+app_env = ${APP_ENV}
+app_name_floware = ${APP_NAME_FLOWARE:floware}
+app_name = ${APP_NAME:call_processing}
+floware_base_url = ${FLOWARE_BASE_URL}
+passthrough_secret = ${PASSTHROUGH_SECRET}
diff --git a/wavefront/server/apps/call_processing/call_processing/constants/__init__.py b/wavefront/server/apps/call_processing/call_processing/constants/__init__.py
new file mode 100644
index 00000000..4e3ca649
--- /dev/null
+++ b/wavefront/server/apps/call_processing/call_processing/constants/__init__.py
@@ -0,0 +1 @@
+# Empty file for package
diff --git a/wavefront/server/apps/call_processing/call_processing/constants/api_endpoints.py b/wavefront/server/apps/call_processing/call_processing/constants/api_endpoints.py
new file mode 100644
index 00000000..bc169447
--- /dev/null
+++
b/wavefront/server/apps/call_processing/call_processing/constants/api_endpoints.py @@ -0,0 +1,20 @@ +"""API endpoint constants for floware config APIs""" + +# Floware config API endpoints +VOICE_AGENT_ENDPOINT = '/floware/v1/voice-agents/{agent_id}' +LLM_INFERENCE_CONFIG_ENDPOINT = '/floware/v1/llm-inference-configs/{config_id}' +TTS_CONFIG_ENDPOINT = '/floware/v1/tts-configs/{config_id}' +STT_CONFIG_ENDPOINT = '/floware/v1/stt-configs/{config_id}' +TELEPHONY_CONFIG_ENDPOINT = '/floware/v1/telephony-configs/{config_id}' + +# Config type mapping for cache invalidation +CONFIG_TYPE_ENDPOINTS = { + 'voice_agent': VOICE_AGENT_ENDPOINT, + 'llm_inference_config': LLM_INFERENCE_CONFIG_ENDPOINT, + 'tts_config': TTS_CONFIG_ENDPOINT, + 'stt_config': STT_CONFIG_ENDPOINT, + 'telephony_config': TELEPHONY_CONFIG_ENDPOINT, +} + +# Valid config types +VALID_CONFIG_TYPES = set(CONFIG_TYPE_ENDPOINTS.keys()) diff --git a/wavefront/server/apps/call_processing/call_processing/constants/auth.py b/wavefront/server/apps/call_processing/call_processing/constants/auth.py new file mode 100644 index 00000000..78d2ea8b --- /dev/null +++ b/wavefront/server/apps/call_processing/call_processing/constants/auth.py @@ -0,0 +1,10 @@ +"""Authentication constants.""" + + +class RootfloHeaders: + CLIENT_KEY = 'X-Rootflo-Key' + PASSTHROUGH = 'X-Passthrough' + + +AUTH_ROLE_ID = 'call_processing' +SERVICE_AUTH_ROLE_ID = 'call-processing-service' diff --git a/wavefront/server/apps/call_processing/call_processing/controllers/cache_controller.py b/wavefront/server/apps/call_processing/call_processing/controllers/cache_controller.py new file mode 100644 index 00000000..eaae7c1a --- /dev/null +++ b/wavefront/server/apps/call_processing/call_processing/controllers/cache_controller.py @@ -0,0 +1,211 @@ +"""Cache management endpoints for voice agent configurations""" + +import os +from uuid import UUID +from fastapi import APIRouter, HTTPException, Header, Depends, status +from fastapi.responses import JSONResponse +from pydantic import BaseModel +from typing import Optional + +from call_processing.log.logger import logger +from call_processing.services.voice_agent_cache_service import VoiceAgentCacheService +from call_processing.cache.cache_utils import ( + get_voice_agent_cache_key, + get_llm_config_cache_key, + get_tts_config_cache_key, + get_stt_config_cache_key, + get_telephony_config_cache_key, +) +from call_processing.constants.api_endpoints import VALID_CONFIG_TYPES +from call_processing.di.application_container import ApplicationContainer +from dependency_injector.wiring import inject, Provide + +cache_router = APIRouter(prefix='/cache') + + +class InvalidateCacheRequest(BaseModel): + """Request body for cache invalidation""" + + config_type: str + config_id: UUID + + +def verify_passthrough_auth(x_passthrough: Optional[str] = Header(None)) -> None: + """ + Verify passthrough authentication header. + In non-production, validates the passthrough header. + In production, skips validation (relies on service mesh/mTLS). 
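+
+    Illustrative client call (the host/port match the local dev server in
+    server.py; the secret and UUID are placeholders):
+
+        import httpx
+        httpx.post(
+            'http://localhost:8004/api/cache/invalidate',
+            headers={'X-Passthrough': '<PASSTHROUGH_SECRET>'},
+            json={'config_type': 'tts_config', 'config_id': '<uuid>'},
+        )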
+ + Args: + x_passthrough: X-Passthrough header value + + Raises: + HTTPException: If authentication fails in non-production + """ + app_env = os.getenv('APP_ENV', 'dev') + + # In production, skip passthrough validation (use service mesh instead) + if app_env == 'production': + return + + # Non-production: Strict passthrough validation + expected_secret = os.getenv('PASSTHROUGH_SECRET') + + if not expected_secret: + logger.warning('Passthrough not configured') + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Passthrough not configured', + ) + + if not x_passthrough: + logger.warning('Missing X-Passthrough header') + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail='Missing X-Passthrough header', + ) + + if x_passthrough != expected_secret: + logger.warning('Invalid X-Passthrough header') + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail='Invalid authentication credentials', + ) + + +@cache_router.post('/invalidate', status_code=200) +@inject +async def invalidate_cache( + request: InvalidateCacheRequest, + voice_agent_cache_service: VoiceAgentCacheService = Depends( + Provide[ApplicationContainer.voice_agent_cache_service] + ), + _auth: None = Depends(verify_passthrough_auth), +): + """ + Invalidate and refresh a specific config in cache + + This endpoint implements the "refresh" pattern: + 1. Remove the config from cache + 2. Fetch fresh config from floware API + 3. Store the fresh config back in cache + + Authentication: Requires X-Passthrough header + + Args: + request: Contains config_type and config_id + + Returns: + JSONResponse with success message + + Raises: + HTTPException: If config_type is invalid or API fetch fails + """ + config_type = request.config_type + config_id = request.config_id + + # Validate config type + if config_type not in VALID_CONFIG_TYPES: + logger.error(f'Invalid config type: {config_type}') + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Invalid config_type. 
Must be one of: {", ".join(VALID_CONFIG_TYPES)}', + ) + + logger.info(f'Invalidating cache for {config_type} {config_id}') + + # Step 1: Get the appropriate cache key + cache_key_funcs = { + 'voice_agent': get_voice_agent_cache_key, + 'llm_inference_config': get_llm_config_cache_key, + 'tts_config': get_tts_config_cache_key, + 'stt_config': get_stt_config_cache_key, + 'telephony_config': get_telephony_config_cache_key, + } + + cache_key_func = cache_key_funcs.get(config_type) + if cache_key_func is None: + logger.error(f'No cache key function configured for config_type={config_type}') + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Cache key mapping not configured for given config_type', + ) + + cache_key = cache_key_func(config_id) + + # Step 2: Remove from cache + removed = voice_agent_cache_service.cache_manager.remove(cache_key) + if removed: + logger.info(f'Removed {config_type} {config_id} from cache') + else: + logger.warning(f'{config_type} {config_id} was not in cache') + + # Step 3: Fetch fresh config from floware API + try: + if not voice_agent_cache_service.floware_http_client: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Floware HTTP client not configured', + ) + + # Special handling for voice_agent vs other configs + if config_type == 'voice_agent': + fresh_config = ( + await voice_agent_cache_service.floware_http_client.fetch_voice_agent( + config_id + ) + ) + else: + fresh_config = ( + await voice_agent_cache_service.floware_http_client.fetch_config( + config_type, config_id + ) + ) + + # If config not found, just remove from cache (don't fail) + if not fresh_config: + logger.info( + f'{config_type} {config_id} not found in floware (likely deleted). ' + f'Removed from cache.' 
+ ) + return JSONResponse( + status_code=status.HTTP_200_OK, + content={ + 'message': f'Cache invalidated for {config_type} {config_id}', + 'config_type': config_type, + 'config_id': str(config_id), + 'note': 'Config not found in floware - likely deleted', + }, + ) + + # Step 4: Store fresh config back in cache + success = voice_agent_cache_service.cache_manager.set_json( + cache_key, fresh_config, expiry=voice_agent_cache_service.cache_ttl + ) + + if not success: + logger.error(f'Failed to cache fresh config for {config_type} {config_id}') + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Failed to cache fresh config', + ) + + logger.info(f'Successfully refreshed cache for {config_type} {config_id}') + + return JSONResponse( + status_code=status.HTTP_200_OK, + content={ + 'message': f'Successfully invalidated and refreshed cache for {config_type} {config_id}', + 'config_type': config_type, + 'config_id': str(config_id), + }, + ) + + except HTTPException: + raise + except Exception as e: + logger.error(f'Error refreshing cache for {config_type} {config_id}: {e}') + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f'Failed to refresh cache: {str(e)}', + ) diff --git a/wavefront/server/apps/call_processing/call_processing/controllers/webhook_controller.py b/wavefront/server/apps/call_processing/call_processing/controllers/webhook_controller.py new file mode 100644 index 00000000..08230762 --- /dev/null +++ b/wavefront/server/apps/call_processing/call_processing/controllers/webhook_controller.py @@ -0,0 +1,186 @@ +""" +Twilio webhook endpoints + +Handles TwiML generation and WebSocket audio streaming +""" + +import os +from uuid import UUID +from fastapi import APIRouter, WebSocket, Query, Depends +from fastapi.responses import Response +from twilio.twiml.voice_response import VoiceResponse, Connect, Stream +from call_processing.log.logger import logger +from dependency_injector.wiring import inject, Provide + +# Pipecat imports for WebSocket handling +from pipecat.runner.types import WebSocketRunnerArguments +from pipecat.runner.utils import parse_telephony_websocket +from pipecat.serializers.twilio import TwilioFrameSerializer +from pipecat.audio.vad.silero import SileroVADAnalyzer +from pipecat.audio.vad.vad_analyzer import VADParams +from pipecat.transports.websocket.fastapi import ( + FastAPIWebsocketTransport, + FastAPIWebsocketParams, +) + +from call_processing.services.voice_agent_cache_service import VoiceAgentCacheService +from call_processing.services.pipecat_service import PipecatService +from call_processing.di.application_container import ApplicationContainer + +webhook_router = APIRouter() + + +@webhook_router.post('/twiml') +async def twiml_endpoint( + voice_agent_id: str = Query(...), + welcome_message_audio_url: str = Query(default=''), +): + """ + Twilio TwiML endpoint + + Called by Twilio when call connects. + Returns TwiML XML with WebSocket connection instructions. 
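+
+    Illustrative shape of the TwiML produced below (URLs and IDs are
+    placeholders):
+
+        <Response>
+            <Play>https://example.com/welcome.mp3</Play>
+            <Connect>
+                <Stream url="wss://<host>/webhooks/ws">
+                    <Parameter name="voice_agent_id" value="<uuid>" />
+                </Stream>
+            </Connect>
+            <Pause length="60"/>
+        </Response>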
+ + Query params: + voice_agent_id: UUID of the voice agent configuration + welcome_message_audio_url: URL of the welcome message audio file + """ + logger.info(f'TwiML requested for voice_agent_id: {voice_agent_id}') + logger.info(f'Welcome message audio URL: {welcome_message_audio_url}') + + # Build WebSocket URL + base_url = os.getenv('CALL_PROCESSING_BASE_URL', 'http://localhost:8003') + + # Convert https:// to wss:// (or http:// to ws://) + if base_url.startswith('https://'): + websocket_url = base_url.replace('https://', 'wss://') + elif base_url.startswith('http://'): + websocket_url = base_url.replace('http://', 'ws://') + else: + websocket_url = f'wss://{base_url}' + + websocket_url = f'{websocket_url}/webhooks/ws' + + logger.info(f'WebSocket URL: {websocket_url}') + + # Generate TwiML response + response = VoiceResponse() + + # Play welcome message audio if URL is provided + if welcome_message_audio_url: + response.play(welcome_message_audio_url) + else: + logger.warning( + 'No welcome message audio URL provided, skipping welcome message' + ) + + connect = Connect() + stream = Stream(url=websocket_url) + + # Pass voice_agent_id as stream parameter + stream.parameter(name='voice_agent_id', value=voice_agent_id) + + connect.append(stream) + response.append(connect) + + # Pause for 60 seconds before auto-hangup (adjust as needed) + response.pause(length=60) + + twiml_xml = str(response) + logger.info(f'Returning TwiML: {twiml_xml}') + + return Response(content=twiml_xml, media_type='application/xml') + + +@webhook_router.websocket('/ws') +@inject +async def websocket_endpoint( + websocket: WebSocket, + voice_agent_cache_service: VoiceAgentCacheService = Depends( + Provide[ApplicationContainer.voice_agent_cache_service] + ), +): + """ + Twilio Media Stream WebSocket endpoint + + Handles bidirectional audio streaming with Pipecat pipeline. 
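+
+    Flow (as implemented below):
+        1. Parse the Twilio media-stream handshake and read voice_agent_id
+           from the stream parameters.
+        2. Load agent, LLM, TTS, STT and telephony configs from cache, with
+           floware API fallback.
+        3. Build a TwilioFrameSerializer and a FastAPI WebSocket transport
+           with Silero VAD.
+        4. Hand the transport and configs to PipecatService.run_conversation().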
+ """ + await websocket.accept() + logger.info('WebSocket connection accepted') + + try: + # Create runner arguments and parse Twilio connection + runner_args = WebSocketRunnerArguments(websocket=websocket) + transport_type, call_data = await parse_telephony_websocket( + runner_args.websocket + ) + + logger.info(f'Auto-detected transport: {transport_type}') + logger.info(f'Call data: {call_data}') + + # Extract voice_agent_id from stream parameters + body_data = call_data.get('body', {}) + voice_agent_id = body_data.get('voice_agent_id') + + if not voice_agent_id: + logger.error('voice_agent_id not found in stream parameters') + await websocket.close(code=1008, reason='Missing voice_agent_id') + return + + logger.info(f'Voice agent ID: {voice_agent_id}') + + # Convert voice_agent_id to UUID + try: + agent_uuid = UUID(voice_agent_id) + except ValueError: + logger.error(f'Invalid UUID format for voice_agent_id: {voice_agent_id}') + await websocket.close(code=1008, reason='Invalid voice_agent_id format') + return + + # Fetch all configs from cache with API fallback + configs = await voice_agent_cache_service.get_all_agent_configs(agent_uuid) + + logger.info('Successfully fetched all configs from cache') + + # Create Twilio frame serializer + serializer = TwilioFrameSerializer( + stream_sid=call_data['stream_id'], + call_sid=call_data['call_id'], + account_sid=configs['telephony_config']['credentials']['account_sid'], + auth_token=configs['telephony_config']['credentials']['auth_token'], + ) + + # Create FastAPI WebSocket transport + transport = FastAPIWebsocketTransport( + websocket=websocket, + params=FastAPIWebsocketParams( + audio_in_enabled=True, + audio_out_enabled=True, + audio_in_passthrough=True, + add_wav_header=False, # Twilio doesn't need WAV header + vad_analyzer=SileroVADAnalyzer( + params=VADParams( + confidence=0.7, # Default is 0.7, can lower to 0.4-0.5 for faster detection + start_secs=0.15, # Default is 0.2, keep it + stop_secs=0.5, # KEY: Lower from default 0.8 for faster cutoff + min_volume=0.6, # Default is 0.6, adjust based on your audio quality + ), + ), # Voice Activity Detection + serializer=serializer, + ), + ) + + # Run conversation pipeline + pipecat_service = PipecatService() + await pipecat_service.run_conversation( + transport=transport, + agent_config=configs['agent'], + llm_config=configs['llm_config'], + tts_config=configs['tts_config'], + stt_config=configs['stt_config'], + ) + + except Exception as e: + logger.error(f'Error in WebSocket endpoint: {e}', exc_info=True) + if websocket.client_state.name != 'DISCONNECTED': + await websocket.close(code=1011, reason='Internal error') diff --git a/wavefront/server/apps/call_processing/call_processing/di/application_container.py b/wavefront/server/apps/call_processing/call_processing/di/application_container.py new file mode 100644 index 00000000..d8b71a83 --- /dev/null +++ b/wavefront/server/apps/call_processing/call_processing/di/application_container.py @@ -0,0 +1,31 @@ +from dependency_injector import containers +from dependency_injector import providers + +from call_processing.cache.cache_manager import CacheManager +from call_processing.services.voice_agent_cache_service import VoiceAgentCacheService +from call_processing.services.floware_http_client import FlowareHttpClient + + +class ApplicationContainer(containers.DeclarativeContainer): + config = providers.Configuration(ini_files=['./config.ini']) + + # Cache + cache_manager = providers.Singleton( + CacheManager, namespace=config.env_config.app_name + ) + 
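+    # Cache entries are refreshed via POST /api/cache/invalidate (see
+    # cache_controller), which evicts the key and re-fetches the config
+    # through the floware HTTP client below.
+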
+ # HTTP Client for floware + floware_http_client = providers.Singleton( + FlowareHttpClient, + base_url=config.env_config.floware_base_url, + passthrough_secret=config.env_config.passthrough_secret, + app_env=config.env_config.app_env, + timeout=30.0, + ) + + # Services + voice_agent_cache_service = providers.Singleton( + VoiceAgentCacheService, + cache_manager=cache_manager, + floware_http_client=floware_http_client, + ) diff --git a/wavefront/server/apps/call_processing/call_processing/log/logger.py b/wavefront/server/apps/call_processing/call_processing/log/logger.py new file mode 100644 index 00000000..539d1c44 --- /dev/null +++ b/wavefront/server/apps/call_processing/call_processing/log/logger.py @@ -0,0 +1,42 @@ +import logging +import os + + +class RequestAwareFormatter(logging.Formatter): + def format(self, record: logging.LogRecord) -> str: + return super().format(record) + + +class RequestAwareLogger(logging.Logger): + def error(self, msg, *args, **kwargs): + """Override error method to always include exc_info=True.""" + if 'exc_info' not in kwargs: + kwargs['exc_info'] = True + super().error(msg, *args, **kwargs) + + +log_level = os.environ.get('LOG_LEVEL', 'INFO') +logging.getLogger('uvicorn').setLevel(log_level) + +log_format = ( + '%(asctime)s | %(levelname)-8s | %(name)s | ' + '%(filename)s:%(lineno)d | %(message)s' +) + +formatter = RequestAwareFormatter(fmt=log_format, datefmt='%Y-%m-%d %H:%M:%S') + +logging.setLoggerClass(RequestAwareLogger) + +logging.basicConfig( + level=log_level, + format=log_format, + datefmt='%Y-%m-%d %H:%M:%S', + force=True, # Override any existing configuration +) + +# Get root logger and apply custom formatter +root_logger = logging.getLogger() +for handler in root_logger.handlers: + handler.setFormatter(formatter) + +logger = logging.getLogger('call_processing') diff --git a/wavefront/server/apps/call_processing/call_processing/server.py b/wavefront/server/apps/call_processing/call_processing/server.py new file mode 100644 index 00000000..91ce8cae --- /dev/null +++ b/wavefront/server/apps/call_processing/call_processing/server.py @@ -0,0 +1,117 @@ +import glob +import os + +from call_processing.log.logger import logger +from dotenv import load_dotenv +from fastapi import FastAPI, HTTPException, Request +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import JSONResponse +import uvicorn + +from call_processing.di.application_container import ApplicationContainer +from call_processing.controllers.webhook_controller import webhook_router +from call_processing.controllers.cache_controller import cache_router + +load_dotenv() + +environment = os.getenv('APP_ENV', 'dev') + +# Initialize containers +application_container = ApplicationContainer() + +# Wire containers +application_container.wire( + modules=[__name__], + packages=[ + 'call_processing.controllers', + ], +) + + +app = FastAPI( + title='Call Processing API', + description='Real-time voice call processing with Pipecat', + version='1.0.0', +) + +origins = os.getenv('ALLOWED_ORIGINS', 'http://localhost:8001') +allowed_origins = origins.split(',') + +# Configure CORS with proper security settings +app.add_middleware( + CORSMiddleware, + allow_origins=allowed_origins, + allow_credentials=True, + allow_methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'], + allow_headers=['*'], + expose_headers=[ + 'X-Content-Type-Options', + 'X-XSS-Protection', + 'X-Frame-Options', + 'Referrer-Policy', + 'Content-Security-Policy', + 'Pragma', + 'Expires', + 
+        'Strict-Transport-Security',
+        'Cache-Control',
+    ],
+)
+
+# Include routers
+app.include_router(webhook_router, prefix='/webhooks')
+app.include_router(cache_router, prefix='/api')
+
+
+@app.get('/health')
+async def health_check():
+    """Health check endpoint"""
+    return JSONResponse(
+        content={'status': 'healthy', 'service': 'call-processing'}, status_code=200
+    )
+
+
+@app.exception_handler(Exception)
+async def global_exception_handler(request: Request, exc: Exception):
+    # Skip HTTPExceptions (they're handled by FastAPI)
+    if isinstance(exc, HTTPException):
+        raise exc
+
+    error_message = 'An unexpected error has occurred while performing this action, please try again'
+    if environment != 'production':
+        error_message += f' - {str(exc)}'
+
+    logger.error(f'Error in API call: {exc}', exc_info=True)
+
+    return JSONResponse(
+        status_code=500,
+        content=error_message,
+    )
+
+
+# Running with Uvicorn (for local development)
+if __name__ == '__main__':
+    print(f'Starting application in environment: {environment}')
+    if environment == 'production':
+        uvicorn.run(
+            'server:app', host='0.0.0.0', port=8004, workers=1, log_level='critical'
+        )
+        print(f'Started application in environment: {environment}')
+
+    else:
+        dirs = glob.glob('../../../**/*_module/**', recursive=True)
+        dirs.extend(glob.glob('../../../**/plugins/**', recursive=True))
+        dirs.extend(glob.glob('../../../**/packages/**', recursive=True))
+        dirs.append('../../call_processing')
+
+        uvicorn.run(
+            'server:app',
+            host='0.0.0.0',
+            port=8004,
+            workers=1,
+            reload=True,
+            reload_includes=dirs,
+            log_level='info',
+        )
+        print(f'Started application in environment: {environment}')
diff --git a/wavefront/server/apps/call_processing/call_processing/services/floware_http_client.py b/wavefront/server/apps/call_processing/call_processing/services/floware_http_client.py
new file mode 100644
index 00000000..cd8fe97f
--- /dev/null
+++ b/wavefront/server/apps/call_processing/call_processing/services/floware_http_client.py
@@ -0,0 +1,125 @@
+"""HTTP client for making requests to floware APIs"""
+
+import httpx
+from typing import Dict, Any, Optional
+from uuid import UUID
+
+from call_processing.log.logger import logger
+from call_processing.constants.api_endpoints import (
+    CONFIG_TYPE_ENDPOINTS,
+    VOICE_AGENT_ENDPOINT,
+)
+from call_processing.constants.auth import RootfloHeaders
+
+
+class FlowareHttpClient:
+    """HTTP client for making requests to floware APIs"""
+
+    def __init__(
+        self,
+        base_url: str,
+        passthrough_secret: str,
+        app_env: str = 'production',
+        timeout: float = 30.0,
+    ):
+        self.base_url = base_url.rstrip('/')
+        self.passthrough_secret = passthrough_secret
+        self.app_env = app_env
+        self.timeout = timeout
+
+    def _get_headers(self) -> Dict[str, str]:
+        """
+        Generate request headers with environment-aware authentication.
+        Only includes passthrough header in non-production environments.
+ """ + headers: Dict[str, str] = {'Content-Type': 'application/json'} + + # Add passthrough header for non-production environments + if self.app_env != 'production' and self.passthrough_secret: + headers[RootfloHeaders.PASSTHROUGH] = self.passthrough_secret + + return headers + + async def fetch_voice_agent(self, agent_id: UUID) -> Optional[Dict[str, Any]]: + """ + Fetch a voice agent from floware API + + Args: + agent_id: UUID of the voice agent + + Returns: + Voice agent dict if successful + + Raises: + httpx.HTTPStatusError: If API returns 4xx/5xx error + httpx.RequestError: If request fails (network error, timeout, etc.) + + """ + url = f'{self.base_url}{VOICE_AGENT_ENDPOINT.format(agent_id=agent_id)}' + + async with httpx.AsyncClient(timeout=self.timeout) as client: + try: + response = await client.get(url, headers=self._get_headers()) + response.raise_for_status() + + # Extract voice agent from response data structure + data = response.json() + if 'data' in data: + # Handle response_formatter wrapped response + return data['data'] + return data + + except httpx.HTTPStatusError as e: + logger.error( + f'HTTP error fetching voice_agent {agent_id}: ' + f'status={e.response.status_code}' + ) + raise + except httpx.RequestError as e: + logger.error(f'Request error fetching voice_agent {agent_id}: {e}') + raise + + async def fetch_config( + self, config_type: str, config_id: UUID + ) -> Optional[Dict[str, Any]]: + """ + Fetch a config from floware API + + Args: + config_type: Type of config (llm_inference_config, tts_config, etc.) + config_id: UUID of the config + + Returns: + Config dict if successful + + Raises: + ValueError: If config_type is not valid + httpx.HTTPStatusError: If API returns 4xx/5xx error + httpx.RequestError: If request fails (network error, timeout, etc.) + """ + endpoint = CONFIG_TYPE_ENDPOINTS.get(config_type) + if not endpoint: + raise ValueError(f'Invalid config type: {config_type}') + + url = f'{self.base_url}{endpoint.format(config_id=config_id)}' + + async with httpx.AsyncClient(timeout=self.timeout) as client: + try: + response = await client.get(url, headers=self._get_headers()) + response.raise_for_status() + + # Extract config from response_formatter wrapped response + data = response.json() + if 'data' in data: + return data['data'] + return data + + except httpx.HTTPStatusError as e: + logger.error( + f'HTTP error fetching {config_type} {config_id}: ' + f'status={e.response.status_code}' + ) + raise + except httpx.RequestError as e: + logger.error(f'Request error fetching {config_type} {config_id}: {e}') + raise diff --git a/wavefront/server/apps/call_processing/call_processing/services/llm_service.py b/wavefront/server/apps/call_processing/call_processing/services/llm_service.py new file mode 100644 index 00000000..790ac6b7 --- /dev/null +++ b/wavefront/server/apps/call_processing/call_processing/services/llm_service.py @@ -0,0 +1,146 @@ +""" +LLM (Language Model) service factory + +Supports multiple providers: OpenAI, Anthropic, Google, etc. 
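+
+Currently wired providers: openai, google, and groq; any other type raises
+a ValueError in create_llm_service.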
+""" + +from typing import Dict, Any +from call_processing.log.logger import logger + +# Pipecat LLM services +from pipecat.services.openai.llm import OpenAILLMService +from pipecat.services.openai.base_llm import BaseOpenAILLMService +from pipecat.services.google.llm import GoogleLLMService +from pipecat.services.groq.llm import GroqLLMService +# Add more as needed + + +class LLMServiceFactory: + """Factory for creating LLM service instances from configuration""" + + @staticmethod + def create_llm_service(llm_config: Dict[str, Any]): + """ + Create LLM service from configuration + + Args: + llm_config: { + 'type': 'openai' | 'anthropic' | 'google' | 'groq', + 'api_key': 'key', + 'llm_model': 'gpt-4', + 'parameters': { + 'temperature': 0.7, + 'max_tokens': 150, + ... + } + } + + Returns: + Pipecat LLM service instance + """ + llm_type = llm_config['type'] + api_key = llm_config['api_key'] + model = llm_config['llm_model'] + parameters = llm_config.get('parameters', {}) + + if parameters is None: + parameters = {} + + logger.info(f'Creating LLM service: {llm_type} / {model}') + + if llm_type == 'openai': + return LLMServiceFactory._create_openai_llm(api_key, model, parameters) + elif llm_type == 'google': + return LLMServiceFactory._create_google_llm(api_key, model, parameters) + elif llm_type == 'groq': + return LLMServiceFactory._create_groq_llm(api_key, model, parameters) + else: + raise ValueError(f'Unsupported LLM type: {llm_type}') + + @staticmethod + def _create_openai_llm(api_key: str, model: str, parameters: Dict[str, Any]): + """Create OpenAI LLM service""" + # Build InputParams from the parameters dict + params_dict = {} + + if 'temperature' in parameters: + params_dict['temperature'] = parameters['temperature'] + if 'max_tokens' in parameters: + params_dict['max_tokens'] = parameters['max_tokens'] + if 'max_completion_tokens' in parameters: + params_dict['max_completion_tokens'] = parameters['max_completion_tokens'] + if 'top_p' in parameters: + params_dict['top_p'] = parameters['top_p'] + if 'frequency_penalty' in parameters: + params_dict['frequency_penalty'] = parameters['frequency_penalty'] + if 'presence_penalty' in parameters: + params_dict['presence_penalty'] = parameters['presence_penalty'] + if 'seed' in parameters: + params_dict['seed'] = parameters['seed'] + if 'service_tier' in parameters: + params_dict['service_tier'] = parameters['service_tier'] + + # Create InputParams object + input_params = BaseOpenAILLMService.InputParams(**params_dict) + + logger.info( + f"OpenAI LLM config: model={model}, temp={params_dict.get('temperature', 'default')}" + ) + + return OpenAILLMService(api_key=api_key, model=model, params=input_params) + + @staticmethod + def _create_google_llm(api_key: str, model: str, parameters: Dict[str, Any]): + """Create Google LLM service""" + # Build InputParams from the parameters dict + params_dict = {} + + if 'temperature' in parameters: + params_dict['temperature'] = parameters['temperature'] + if 'max_tokens' in parameters: + params_dict['max_tokens'] = parameters['max_tokens'] + if 'top_p' in parameters: + params_dict['top_p'] = parameters['top_p'] + if 'top_k' in parameters: + params_dict['top_k'] = parameters['top_k'] + + # Create InputParams object + input_params = GoogleLLMService.InputParams(**params_dict) + + logger.info( + f"Google LLM config: model={model}, temp={params_dict.get('temperature', 'default')}" + ) + + return GoogleLLMService(api_key=api_key, model=model, params=input_params) + + @staticmethod + def _create_groq_llm(api_key: 
str, model: str, parameters: Dict[str, Any]): + """Create Groq LLM service""" + # Build InputParams from the parameters dict + params_dict = {} + + if 'temperature' in parameters: + params_dict['temperature'] = parameters['temperature'] + if 'max_tokens' in parameters: + params_dict['max_tokens'] = parameters['max_tokens'] + if 'max_completion_tokens' in parameters: + params_dict['max_completion_tokens'] = parameters['max_completion_tokens'] + if 'top_p' in parameters: + params_dict['top_p'] = parameters['top_p'] + if 'frequency_penalty' in parameters: + params_dict['frequency_penalty'] = parameters['frequency_penalty'] + if 'presence_penalty' in parameters: + params_dict['presence_penalty'] = parameters['presence_penalty'] + if 'seed' in parameters: + params_dict['seed'] = parameters['seed'] + if 'service_tier' in parameters: + params_dict['service_tier'] = parameters['service_tier'] + + # Create InputParams object + input_params = GroqLLMService.InputParams(**params_dict) + + logger.info( + f"Groq LLM config: model={model}, temp={params_dict.get('temperature', 'default')}" + ) + + return GroqLLMService(api_key=api_key, model=model, params=input_params) diff --git a/wavefront/server/apps/call_processing/call_processing/services/pipecat_service.py b/wavefront/server/apps/call_processing/call_processing/services/pipecat_service.py new file mode 100644 index 00000000..99210627 --- /dev/null +++ b/wavefront/server/apps/call_processing/call_processing/services/pipecat_service.py @@ -0,0 +1,111 @@ +""" +Pipecat pipeline orchestration service + +Creates and runs the voice conversation pipeline using configured STT/LLM/TTS services +""" + +from typing import Dict, Any +from call_processing.log.logger import logger + +# Pipecat core imports +from pipecat.audio.interruptions.min_words_interruption_strategy import ( + MinWordsInterruptionStrategy, +) +from pipecat.pipeline.pipeline import Pipeline +from pipecat.pipeline.runner import PipelineRunner +from pipecat.pipeline.task import PipelineParams, PipelineTask +from pipecat.processors.aggregators.llm_context import LLMContext +from pipecat.processors.aggregators.llm_response_universal import ( + LLMContextAggregatorPair, +) +from pipecat.transports.base_transport import BaseTransport + +from call_processing.services.stt_service import STTServiceFactory +from call_processing.services.tts_service import TTSServiceFactory +from call_processing.services.llm_service import LLMServiceFactory + + +class PipecatService: + """Service for creating and running Pipecat pipelines""" + + async def run_conversation( + self, + transport: BaseTransport, + agent_config: Dict[str, Any], + llm_config: Dict[str, Any], + tts_config: Dict[str, Any], + stt_config: Dict[str, Any], + ): + """ + Create and run the Pipecat pipeline for a voice conversation + + Args: + transport: Pipecat transport (e.g., WebSocket transport from Twilio) + agent_config: Voice agent configuration including system_prompt + llm_config: LLM provider configuration + tts_config: TTS provider configuration + stt_config: STT provider configuration + """ + logger.info(f"Starting conversation for agent: {agent_config['name']}") + + # Create services using factories + stt = STTServiceFactory.create_stt_service(stt_config) + llm = LLMServiceFactory.create_llm_service(llm_config) + tts = TTSServiceFactory.create_tts_service(tts_config) + + # Create initial messages with system prompt + messages = [ + { + 'role': 'system', + 'content': agent_config['system_prompt'], + } + ] + + # Create LLM context and 
aggregator
+        context = LLMContext(messages)
+        context_aggregator = LLMContextAggregatorPair(context)
+
+        # Create pipeline
+        pipeline = Pipeline(
+            [
+                transport.input(),  # Audio input from Twilio
+                stt,  # Speech-to-Text
+                context_aggregator.user(),  # Add user message to context
+                llm,  # LLM processing
+                tts,  # Text-to-Speech
+                transport.output(),  # Audio output to Twilio
+                context_aggregator.assistant(),  # Add assistant response to context
+            ]
+        )
+
+        # Create pipeline task with Twilio-specific parameters
+        task = PipelineTask(
+            pipeline,
+            params=PipelineParams(
+                audio_in_sample_rate=8000,  # Twilio uses 8kHz
+                audio_out_sample_rate=8000,
+                enable_metrics=True,
+                # enable_usage_metrics=True,
+                allow_interruptions=True,
+                interruption_strategies=[MinWordsInterruptionStrategy(min_words=2)],
+                # report_only_initial_ttfb=True
+            ),
+            idle_timeout_secs=12,  # End the task if the conversation goes idle
+        )
+
+        # Register event handlers
+        @transport.event_handler('on_client_connected')
+        async def on_client_connected(transport, client):
+            logger.info(f"Client connected for agent: {agent_config['name']}")
+            # Bot waits for user to speak first (can be changed to greet first)
+
+        @transport.event_handler('on_client_disconnected')
+        async def on_client_disconnected(transport, client):
+            logger.info(f"Client disconnected for agent: {agent_config['name']}")
+            await task.cancel()
+
+        # Run pipeline
+        runner = PipelineRunner(handle_sigint=False)
+        await runner.run(task)
+
+        logger.info(f"Conversation ended for agent: {agent_config['name']}")
diff --git a/wavefront/server/apps/call_processing/call_processing/services/stt_service.py b/wavefront/server/apps/call_processing/call_processing/services/stt_service.py
new file mode 100644
index 00000000..964520e0
--- /dev/null
+++ b/wavefront/server/apps/call_processing/call_processing/services/stt_service.py
@@ -0,0 +1,119 @@
+"""
+STT (Speech-to-Text) service factory
+
+Currently supports Deepgram; AssemblyAI and Whisper are stubbed out below,
+and other providers can be added as needed
+"""
+
+from typing import Dict, Any
+from call_processing.log.logger import logger
+
+# Pipecat STT services
+from pipecat.services.deepgram.stt import DeepgramSTTService
+
+# Deepgram options
+from deepgram import LiveOptions
+
+# Add more as needed:
+# from pipecat.services.assemblyai.stt import AssemblyAISTTService
+# from pipecat.services.whisper.stt import WhisperSTTService
+
+
+class STTServiceFactory:
+    """Factory for creating STT service instances from configuration"""
+
+    @staticmethod
+    def create_stt_service(stt_config: Dict[str, Any]):
+        """
+        Create STT service from configuration
+
+        Args:
+            stt_config: {
+                'provider': 'deepgram' | 'assemblyai' | 'whisper',
+                'api_key': 'key',
+                'parameters': {
+                    'model': 'nova-2',
+                    'language': 'en',
+                    ...
+                }
+            }
+
+        Returns:
+            Pipecat STT service instance
+        """
+        provider = stt_config['provider']
+        api_key = stt_config['api_key']
+        parameters = stt_config.get('parameters', {})
+
+        if parameters is None:
+            parameters = {}
+
+        logger.info(f'Creating STT service: {provider}')
+
+        if provider == 'deepgram':
+            return STTServiceFactory._create_deepgram_stt(api_key, parameters)
+        elif provider == 'assemblyai':
+            return STTServiceFactory._create_assemblyai_stt(api_key, parameters)
+        elif provider == 'whisper':
+            return STTServiceFactory._create_whisper_stt(api_key, parameters)
+        else:
+            raise ValueError(f'Unsupported STT provider: {provider}')
+
+    @staticmethod
+    def _create_deepgram_stt(api_key: str, parameters: Dict[str, Any]):
+        """Create Deepgram STT service"""
+        # Build LiveOptions from the parameters dict
+        options_dict = {}
+
+        # Add parameters from config
+        if 'model' in parameters:
+            options_dict['model'] = parameters['model']
+        if 'language' in parameters:
+            options_dict['language'] = parameters['language']
+        if 'interim_results' in parameters:
+            options_dict['interim_results'] = parameters['interim_results']
+        if 'encoding' in parameters:
+            options_dict['encoding'] = parameters['encoding']
+        if 'sample_rate' in parameters:
+            options_dict['sample_rate'] = parameters['sample_rate']
+        if 'endpointing' in parameters:
+            options_dict['endpointing'] = parameters['endpointing']
+        if 'channels' in parameters:
+            options_dict['channels'] = parameters['channels']
+        if 'smart_format' in parameters:
+            options_dict['smart_format'] = parameters['smart_format']
+        if 'punctuate' in parameters:
+            options_dict['punctuate'] = parameters['punctuate']
+        if 'profanity_filter' in parameters:
+            options_dict['profanity_filter'] = parameters['profanity_filter']
+        if 'vad_events' in parameters:
+            options_dict['vad_events'] = parameters['vad_events']
+
+        # Set smart defaults if not provided
+        options_dict.setdefault(
+            'interim_results', True
+        )  # Always enable for faster feedback
+        options_dict.setdefault('endpointing', 300)  # 300ms = faster cutoff
+        options_dict.setdefault('encoding', 'linear16')
+        options_dict.setdefault('sample_rate', 8000)
+        options_dict.setdefault('model', 'nova-2')
+
+        # Create LiveOptions object
+        live_options = LiveOptions(**options_dict)
+
+        logger.info(
+            f"Deepgram STT config: model={options_dict.get('model', 'default')}"
+        )
+
+        return DeepgramSTTService(api_key=api_key, live_options=live_options)
+
+    @staticmethod
+    def _create_assemblyai_stt(api_key: str, parameters: Dict[str, Any]):
+        """Create AssemblyAI STT service"""
+        # TODO: Implement AssemblyAI
+        raise NotImplementedError('AssemblyAI STT provider not yet implemented')
+
+    @staticmethod
+    def _create_whisper_stt(api_key: str, parameters: Dict[str, Any]):
+        """Create Whisper STT service"""
+        # TODO: Implement Whisper
+        raise NotImplementedError('Whisper STT provider not yet implemented')
diff --git a/wavefront/server/apps/call_processing/call_processing/services/tts_service.py b/wavefront/server/apps/call_processing/call_processing/services/tts_service.py
new file mode 100644
index 00000000..c8ac3a9d
--- /dev/null
+++ b/wavefront/server/apps/call_processing/call_processing/services/tts_service.py
@@ -0,0 +1,166 @@
+"""
+TTS (Text-to-Speech) service factory
+
+Currently supports ElevenLabs, Deepgram, and Cartesia; other providers can
+be added as needed
+"""
+
+from typing import Dict, Any
+from call_processing.log.logger import logger
+
+# Pipecat TTS services
+from pipecat.services.elevenlabs.tts import ElevenLabsTTSService
+from pipecat.services.deepgram.tts import DeepgramTTSService
+from pipecat.services.cartesia.tts import CartesiaTTSService
+
+# Language for params
+from pipecat.transcriptions.language import Language
+
+# Add more as needed:
+# from pipecat.services.azure.tts import AzureTTSService
+# from pipecat.services.google.tts import GoogleTTSService
+
+
+class TTSServiceFactory:
+    """Factory for creating TTS service instances from configuration"""
+
+    @staticmethod
+    def create_tts_service(tts_config: Dict[str, Any]):
+        """
+        Create TTS service from configuration
+
+        Args:
+            tts_config: {
+                'provider': 'elevenlabs' | 'deepgram' | 'cartesia',
+                'api_key': 'key',
+                'voice_id': 'voice_id',
+                'parameters': {
+                    'model': 'model_name',
+                    'stability': 0.5,
+                    'similarity_boost': 0.75,
+                    ...
+                }
+            }
+
+        Returns:
+            Pipecat TTS service instance
+
+        Raises:
+            ValueError: If the configured provider is not supported
+        """
+        provider = tts_config['provider']
+        api_key = tts_config['api_key']
+        voice_id = tts_config['voice_id']
+        parameters = tts_config.get('parameters', {})
+
+        if parameters is None:
+            parameters = {}
+
+        logger.info(f'Creating TTS service: {provider} / voice: {voice_id}')
+
+        if provider == 'elevenlabs':
+            return TTSServiceFactory._create_elevenlabs_tts(
+                api_key, voice_id, parameters
+            )
+        elif provider == 'deepgram':
+            return TTSServiceFactory._create_deepgram_tts(api_key, voice_id, parameters)
+        elif provider == 'cartesia':
+            return TTSServiceFactory._create_cartesia_tts(api_key, voice_id, parameters)
+        else:
+            raise ValueError(f'Unsupported TTS provider: {provider}')
+
+    @staticmethod
+    def _create_elevenlabs_tts(api_key: str, voice_id: str, parameters: Dict[str, Any]):
+        """Create ElevenLabs TTS service"""
+        # Model is a direct parameter, not in InputParams
+        model = parameters.get('model', 'eleven_turbo_v2_5')
+
+        # Build InputParams from the parameters dict
+        params_dict = {}
+
+        if 'language' in parameters:
+            # Convert string to Language enum if needed
+            lang = parameters['language']
+            if isinstance(lang, str):
+                try:
+                    params_dict['language'] = Language(lang)
+                except ValueError:
+                    logger.warning(f"Unknown language '{lang}', skipping")
+            else:
+                params_dict['language'] = lang
+
+        if 'stability' in parameters:
+            params_dict['stability'] = parameters['stability']
+        if 'similarity_boost' in parameters:
+            params_dict['similarity_boost'] = parameters['similarity_boost']
+        if 'style' in parameters:
+            params_dict['style'] = parameters['style']
+        if 'use_speaker_boost' in parameters:
+            params_dict['use_speaker_boost'] = parameters['use_speaker_boost']
+        if 'speed' in parameters:
+            params_dict['speed'] = parameters['speed']
+
+        # Create InputParams object (only if we have params)
+        input_params = (
+            ElevenLabsTTSService.InputParams(**params_dict) if params_dict else None
+        )
+
+        logger.info(
+            f"ElevenLabs TTS config: model={model}, "
+            f"stability={params_dict.get('stability', 'default')}"
+        )
+
+        return ElevenLabsTTSService(
+            api_key=api_key, voice_id=voice_id, model=model, params=input_params
+        )
+
+    @staticmethod
+    def _create_deepgram_tts(api_key: str, voice_id: str, parameters: Dict[str, Any]):
+        """Create Deepgram TTS service"""
+        kwargs = {
+            'api_key': api_key,
+            'voice': voice_id,  # voice_id IS the model (e.g., "aura-2-helena-en")
+        }
+
+        # Optional parameters
+        if 'base_url' in parameters:
+            kwargs['base_url'] = parameters['base_url']
+        if 'encoding' in parameters:
+            kwargs['encoding'] = parameters['encoding']
+        if 'sample_rate' in parameters:
+            kwargs['sample_rate'] = parameters['sample_rate']
+
+        logger.info(f'Deepgram TTS 
config: voice={voice_id}') + return DeepgramTTSService(**kwargs) + + @staticmethod + def _create_cartesia_tts(api_key: str, voice_id: str, parameters: Dict[str, Any]): + """Create Cartesia TTS service""" + # Model is a direct parameter + model = parameters.get('model', 'sonic-2') + + # Build InputParams from the parameters dict + params_dict = {} + + if 'language' in parameters: + # Convert string to Language enum if needed + lang = parameters['language'] + if isinstance(lang, str): + try: + params_dict['language'] = Language(lang) + except ValueError: + logger.warning(f"Unknown language '{lang}', skipping") + else: + params_dict['language'] = lang + + if 'speed' in parameters: + params_dict['speed'] = parameters['speed'] + if 'emotion' in parameters: + params_dict['emotion'] = parameters['emotion'] + + # Create InputParams object (only if we have params) + input_params = ( + CartesiaTTSService.InputParams(**params_dict) if params_dict else None + ) + + logger.info(f'Cartesia TTS config: voice={voice_id}, model={model}') + + return CartesiaTTSService( + api_key=api_key, voice_id=voice_id, model=model, params=input_params + ) diff --git a/wavefront/server/apps/call_processing/call_processing/services/voice_agent_cache_service.py b/wavefront/server/apps/call_processing/call_processing/services/voice_agent_cache_service.py new file mode 100644 index 00000000..c3c84890 --- /dev/null +++ b/wavefront/server/apps/call_processing/call_processing/services/voice_agent_cache_service.py @@ -0,0 +1,264 @@ +"""Voice agent cache service for fetching configs from Redis""" + +import asyncio +from typing import Dict, Any, List, Tuple, Optional +from uuid import UUID + +from call_processing.cache.cache_manager import CacheManager +from call_processing.cache.cache_utils import ( + get_voice_agent_cache_key, + get_llm_config_cache_key, + get_tts_config_cache_key, + get_stt_config_cache_key, + get_telephony_config_cache_key, +) +from call_processing.log.logger import logger +from call_processing.services.floware_http_client import FlowareHttpClient +from fastapi import HTTPException + + +class VoiceAgentCacheService: + """Service for fetching voice agent configurations from Redis cache""" + + def __init__( + self, + cache_manager: CacheManager, + floware_http_client: Optional[FlowareHttpClient] = None, + ): + self.cache_manager = cache_manager + self.floware_http_client = floware_http_client + self.cache_ttl = 3600 * 24 + + async def _fetch_missing_configs_from_api( + self, missing_configs: List[Tuple[str, UUID]] + ) -> Dict[str, Any]: + """ + Fetch missing configs from floware API in parallel + + Args: + missing_configs: List of (config_type, config_id) tuples + + Returns: + Dict mapping config_type to fetched config data + + Raises: + HTTPException: If any API call fails + """ + if not self.floware_http_client: + raise HTTPException( + status_code=500, + detail='Floware HTTP client not configured for API fallback', + ) + + # Create parallel fetch tasks + tasks = [ + self.floware_http_client.fetch_config(config_type, config_id) + for config_type, config_id in missing_configs + ] + + try: + # Execute all fetches in parallel + results = await asyncio.gather(*tasks, return_exceptions=True) + + # Check for errors + configs = {} + errors = [] + + for (config_type, config_id), result in zip(missing_configs, results): + if isinstance(result, Exception): + error_msg = f'{config_type} {config_id}: {str(result)}' + errors.append(error_msg) + logger.error(f'Failed to fetch {error_msg}') + elif result is None: + error_msg 
= f'{config_type} {config_id} not found (404)' + errors.append(error_msg) + logger.error(f'Config not found: {error_msg}') + else: + configs[config_type] = result + logger.info( + f'Successfully fetched {config_type} {config_id} from API' + ) + + if errors: + raise HTTPException( + status_code=404, + detail=f"Failed to fetch configs from API: {', '.join(errors)}", + ) + + return configs + + except HTTPException: + raise + except Exception as e: + logger.error(f'Error in parallel config fetch: {e}', exc_info=True) + raise + + def _cache_config( + self, config_type: str, config_id: UUID, config_data: Dict + ) -> None: + """Cache a config with appropriate key""" + cache_key_funcs = { + 'llm_inference_config': get_llm_config_cache_key, + 'tts_config': get_tts_config_cache_key, + 'stt_config': get_stt_config_cache_key, + 'telephony_config': get_telephony_config_cache_key, + } + + cache_key_func = cache_key_funcs.get(config_type) + if cache_key_func: + cache_key = cache_key_func(config_id) + self.cache_manager.set_json(cache_key, config_data, expiry=self.cache_ttl) + logger.info(f'Cached {config_type} {config_id}') + + async def get_all_agent_configs(self, agent_id: UUID) -> Dict[str, Any]: + """ + Fetch voice agent and all related configs from cache, with API fallback + + Strategy: + 1. Try fetching voice agent from cache + 2. If voice agent missing, fetch from floware API + 3. Try fetching all configs from cache + 4. For any missing configs, fetch them from floware API in parallel + 5. Cache the newly fetched configs for future requests + 6. If any API calls fail, raise HTTPException + + Args: + agent_id: Voice agent UUID + + Returns: + { + 'agent': {...}, + 'llm_config': {...}, + 'tts_config': {...}, + 'stt_config': {...}, + 'telephony_config': {...} + } + + Raises: + HTTPException: If agent not found or configs cannot be fetched + """ + # Fetch voice agent from cache + agent_key = get_voice_agent_cache_key(agent_id) + agent = self.cache_manager.get_json(agent_key) + + # If agent not in cache, fetch from API + if not agent: + logger.info(f'Voice agent {agent_id} not found in cache, fetching from API') + if not self.floware_http_client: + logger.error( + f'Voice agent {agent_id} not found and no HTTP client configured' + ) + raise HTTPException( + status_code=404, detail=f'Voice agent {agent_id} not found' + ) + + try: + agent = await self.floware_http_client.fetch_voice_agent(agent_id) + if not agent: + raise HTTPException( + status_code=404, + detail=f'Voice agent {agent_id} not found in floware API', + ) + # Cache the fetched agent + self.cache_manager.set_json(agent_key, agent, expiry=self.cache_ttl) + logger.info(f'Cached voice agent {agent_id} from API') + except Exception as e: + logger.error(f'Failed to fetch voice agent {agent_id} from API: {e}') + raise HTTPException( + status_code=404, detail=f'Voice agent {agent_id} not found' + ) + + # Extract config IDs from agent + llm_config_id = agent.get('llm_config_id') + tts_config_id = agent.get('tts_config_id') + stt_config_id = agent.get('stt_config_id') + telephony_config_id = agent.get('telephony_config_id') + + if not all([llm_config_id, tts_config_id, stt_config_id]): + logger.error(f'Voice agent {agent_id} missing required config IDs') + raise HTTPException( + status_code=500, + detail=f'Voice agent {agent_id} has incomplete configuration', + ) + + # Try fetching all configs from cache first + llm_config = self.cache_manager.get_json( + get_llm_config_cache_key(llm_config_id) + ) + tts_config = self.cache_manager.get_json( + 
get_tts_config_cache_key(tts_config_id)
+        )
+        stt_config = self.cache_manager.get_json(
+            get_stt_config_cache_key(stt_config_id)
+        )
+        telephony_config = self.cache_manager.get_json(
+            get_telephony_config_cache_key(telephony_config_id)
+        )
+
+        # Identify missing configs
+        missing_configs = []
+        if not llm_config:
+            missing_configs.append(('llm_inference_config', llm_config_id))
+        if not tts_config:
+            missing_configs.append(('tts_config', tts_config_id))
+        if not stt_config:
+            missing_configs.append(('stt_config', stt_config_id))
+        if telephony_config_id and not telephony_config:
+            missing_configs.append(('telephony_config', telephony_config_id))
+
+        # If any configs are missing, fetch from API
+        if missing_configs:
+            logger.info(
+                f'Missing {len(missing_configs)} configs for agent {agent_id}, '
+                f'fetching from floware API: {missing_configs}'
+            )
+
+            # Fetch missing configs in parallel
+            fetched_configs = await self._fetch_missing_configs_from_api(
+                missing_configs
+            )
+
+            # Update local variables with fetched configs and cache them
+            for config_type, config_id in missing_configs:
+                config_data = fetched_configs[config_type]
+
+                if config_type == 'llm_inference_config':
+                    llm_config = config_data
+                    self._cache_config(config_type, llm_config_id, config_data)
+                elif config_type == 'tts_config':
+                    tts_config = config_data
+                    self._cache_config(config_type, tts_config_id, config_data)
+                elif config_type == 'stt_config':
+                    stt_config = config_data
+                    self._cache_config(config_type, stt_config_id, config_data)
+                elif config_type == 'telephony_config':
+                    telephony_config = config_data
+                    self._cache_config(config_type, telephony_config_id, config_data)
+
+        # Final validation (the telephony config is optional, so it is only
+        # required when the agent actually references one)
+        telephony_ok = telephony_config or not telephony_config_id
+        if not all([llm_config, tts_config, stt_config, telephony_ok]):
+            missing = []
+            if not llm_config:
+                missing.append(f'LLM config {llm_config_id}')
+            if not tts_config:
+                missing.append(f'TTS config {tts_config_id}')
+            if not stt_config:
+                missing.append(f'STT config {stt_config_id}')
+            if telephony_config_id and not telephony_config:
+                missing.append(f'Telephony config {telephony_config_id}')
+
+            logger.error(f'Still missing configs after API fetch: {", ".join(missing)}')
+            raise HTTPException(
+                status_code=500,
+                detail=f'Failed to fetch all required configs: {", ".join(missing)}',
+            )
+
+        logger.info(f'Successfully fetched all configs for voice agent {agent_id}')
+
+        return {
+            'agent': agent,
+            'llm_config': llm_config,
+            'tts_config': tts_config,
+            'stt_config': stt_config,
+            'telephony_config': telephony_config,
+        }
diff --git a/wavefront/server/apps/call_processing/pyproject.toml b/wavefront/server/apps/call_processing/pyproject.toml
new file mode 100644
index 00000000..66be64a1
--- /dev/null
+++ b/wavefront/server/apps/call_processing/pyproject.toml
@@ -0,0 +1,34 @@
+[project]
+name = "call_processing"
+version = "0.1.0"
+description = "Voice agents call processing backend for rootflo apps"
+authors = [
+    { name = "rootflo engineering", email = "engineering@rootflo.ai" }
+]
+requires-python = ">=3.11"
+
+dependencies = [
+    # FastAPI stack
+    "fastapi>=0.115.2,<1.0.0",
+    "uvicorn>=0.30.5,<1.0.0",
+    "python-multipart>=0.0.9",
+    "python-dotenv>=1.1.0,<2.0.0",
+    "pydantic>=2.0.0",
+    "dependency-injector>=4.46.0,<5.0.0",
+    # HTTP client
+    "httpx>=0.27.0",
+    # Redis and caching
+    "redis>=5.0.0",
+    "tenacity>=8.0.0",
+    # Pipecat and voice processing
+    "pipecat-ai[websocket,cartesia,google,silero,deepgram,groq,runner]==0.0.91",
+    # Twilio
+    "twilio>=8.0.0",
+]
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
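+# A minimal local-run sketch (assumption: call_processing/server.py exposes a
+# FastAPI instance named `app`; adjust the module path, host, and port to the
+# real entrypoint):
+#
+#   pip install -e .
+#   uvicorn call_processing.server:app --host 0.0.0.0 --port 8080
+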
+[tool.hatch.build.targets.wheel] +packages = ["call_processing"] diff --git a/wavefront/server/apps/floconsole/floconsole/__init__.py b/wavefront/server/apps/floconsole/floconsole/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/apps/floconsole/floconsole/authorization/require_auth.py b/wavefront/server/apps/floconsole/floconsole/authorization/require_auth.py new file mode 100644 index 00000000..f904bf87 --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/authorization/require_auth.py @@ -0,0 +1,136 @@ +from dataclasses import dataclass + +from common_module.common_container import CommonContainer +from common_module.log.logger import logger +from common_module.response_formatter import ResponseFormatter +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import Request +from fastapi import status +from fastapi.responses import JSONResponse + +from floconsole.constants.auth import AUTH_ROLE_ID +from floconsole.di.application_container import ApplicationContainer +from floconsole.db.models.session import Session +from floconsole.db.repositories.sql_alchemy_repository import SQLAlchemyRepository +from floconsole.services.token_service import TokenService + +import jwt +from starlette.middleware.base import BaseHTTPMiddleware + +optional_auth_apis = [ + '/floconsole/v1/health', + '/floconsole/v1/authenticate', + '/', + '/docs', + '/openapi.json', +] + + +@dataclass +class UserSession: + role_id: str + user_id: str + session_id: str + + +class RequireAuthMiddleware(BaseHTTPMiddleware): + @inject + async def dispatch( + self, + request: Request, + call_next, + token_service: TokenService = Provide[ApplicationContainer.token_service], + response_formatter: ResponseFormatter = Provide[ + CommonContainer.response_formatter + ], + session_repository: SQLAlchemyRepository[Session] = Provide[ + ApplicationContainer.session_repository + ], + ): + try: + if request.method == 'OPTIONS': + return await call_next(request) + + authorization = request.headers.get('Authorization') + + token = None + if authorization and authorization.startswith('Bearer '): + token = authorization.split(' ')[1] + if request.url.path in optional_auth_apis: + return await call_next(request) + + elif token is None: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Token missing in request' + ), + ) + + try: + decoded = token_service.decode_token(token) + except ValueError as e: + logger.error(f'Token validation error: {e}') + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Invalid token format' + ), + ) + if 'session_id' not in decoded: + logger.error('Invalid token: missing session_id') + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Invalid token: session not found' + ), + ) + + if 'role_id' not in decoded or decoded['role_id'] != AUTH_ROLE_ID: + logger.error('Invalid token: Not the console user') + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Invalid token: Not the console user' + ), + ) + + # If not in cache, fetch from DB + session = await session_repository.find_one(id=decoded['session_id']) + if not session: + logger.error('Invalid session: session not found in database') + return JSONResponse( + 
status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Invalid session' + ), + ) + + if str(session.user_id) != decoded['user_id']: + logger.error('Invalid session: session does not belong to user') + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Invalid session' + ), + ) + + session_obj = UserSession( + role_id=decoded['role_id'], + user_id=decoded['user_id'], + session_id=decoded['session_id'], + ) + request.state.session = session_obj + + response = await call_next(request) + return response + + except jwt.ExpiredSignatureError as exc: + logger.error(f'ExpiredSignatureError in require_auth middleware: {exc}') + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Token has expired. Please log in again.' + ), + ) diff --git a/wavefront/server/apps/floconsole/floconsole/config.ini b/wavefront/server/apps/floconsole/floconsole/config.ini new file mode 100644 index 00000000..bed37fee --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/config.ini @@ -0,0 +1,22 @@ +[database] +username = ${CONSOLE_DB_USERNAME} +password = ${CONSOLE_DB_PASSWORD} +host = ${CONSOLE_DB_HOST} +port = ${CONSOLE_DB_PORT} +db_name = ${CONSOLE_DB_NAME} + +[env_config] +app_env = ${APP_ENV} + +[jwt_token] +token_expiry=${TOKEN_EXPIRY} +temporary_token_expiry=${TEMPORARY_TOKEN_EXPIRY} +private_key=${PRIVATE_KEY} +public_key=${PUBLIC_KEY} +enable_cloud_kms=${ENABLE_CLOUD_KMS} +token_prefix=${CONSOLE_TOKEN_PREFIX:fc_} +issuer=${CONSOLE_JWT_ISSUER:https://console.rootflo.ai} +audience=${CONSOLE_JWT_AUDIENCE:https://console.rootflo.ai} + +[super_admin] +email = ${SUPER_ADMIN_EMAIL:vishnu@rootflo.ai} diff --git a/wavefront/server/apps/floconsole/floconsole/constants/app.py b/wavefront/server/apps/floconsole/floconsole/constants/app.py new file mode 100644 index 00000000..aa0d825d --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/constants/app.py @@ -0,0 +1,12 @@ +from enum import Enum + + +class AppStatus(str, Enum): + SUCCESS = 'success' + IN_PROGRESS = 'in_progress' + FAILED = 'failed' + + +class AppDeploymentType(str, Enum): + MANUAL = 'manual' + AUTO = 'auto' diff --git a/wavefront/server/apps/floconsole/floconsole/constants/auth.py b/wavefront/server/apps/floconsole/floconsole/constants/auth.py new file mode 100644 index 00000000..00818eb8 --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/constants/auth.py @@ -0,0 +1,10 @@ +"""Authentication constants.""" + + +class RootfloHeaders: + CLIENT_KEY = 'X-Rootflo-Key' + PASSTHROUGH = 'X-Passthrough' + + +AUTH_ROLE_ID = 'floconsole' +SERVICE_AUTH_ROLE_ID = 'floconsole-service' diff --git a/wavefront/server/apps/floconsole/floconsole/controllers/__init__.py b/wavefront/server/apps/floconsole/floconsole/controllers/__init__.py new file mode 100644 index 00000000..20a40fab --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/controllers/__init__.py @@ -0,0 +1 @@ +# Controllers package for FloConsole app diff --git a/wavefront/server/apps/floconsole/floconsole/controllers/app_controller.py b/wavefront/server/apps/floconsole/floconsole/controllers/app_controller.py new file mode 100644 index 00000000..639ff468 --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/controllers/app_controller.py @@ -0,0 +1,394 @@ +import requests + +from typing import Optional +from uuid import UUID + +from common_module.common_container import 
CommonContainer
+from common_module.response_formatter import ResponseFormatter
+from common_module.log.logger import logger
+from dependency_injector.wiring import inject
+from dependency_injector.wiring import Provide
+from fastapi import APIRouter, Query
+from fastapi import Depends
+from fastapi import Request
+from fastapi import status
+from fastapi.responses import JSONResponse
+from pydantic import BaseModel
+
+from floconsole.services.app_service import AppService
+from floconsole.di.application_container import ApplicationContainer
+from floconsole.authorization.require_auth import UserSession
+from floconsole.db.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from floconsole.db.models.user import User
+from floconsole.constants.app import AppDeploymentType, AppStatus
+
+# TODO: the Cloud Build webhook key and secret are hardcoded below; move them
+# into configuration instead of source
+build_trigger_url = 'https://cloudbuild.googleapis.com/v1/projects/aesy-330511/locations/asia-south1/triggers/new-app:webhook?key=AIzaSyA_cDcmEHojgD7SG2OI2_6DYSBMeLY8kWk&trigger=new-app&projectId=aesy-330511&secret=Buildtriggersecret'
+
+app_router = APIRouter(prefix='/v1')
+
+
+class CreateAppRequest(BaseModel):
+    app_name: str
+    app_url: Optional[str] = None
+    app_secret: Optional[str] = None
+    app_key: Optional[str] = None
+    deployment_type: AppDeploymentType = AppDeploymentType.MANUAL
+    type: str = 'custom'
+
+
+class UpdateAppRequest(BaseModel):
+    deployment_type: Optional[str] = None
+    app_name: Optional[str] = None
+    app_url: Optional[str] = None
+    app_secret: Optional[str] = None
+    app_key: Optional[str] = None
+
+
+class AppResponse(BaseModel):
+    id: str
+    app_name: str
+    app_url: str
+    app_key: Optional[str] = None
+    status: AppStatus
+    config: dict
+    deployment_type: str
+    type: Optional[str] = None
+    created_at: Optional[str] = None
+    updated_at: Optional[str] = None
+
+    @classmethod
+    def from_model(cls, app):
+        return cls(
+            id=str(app.id),
+            app_name=app.app_name,
+            app_url=app.app_url,
+            app_key=app.app_key,
+            status=app.status,
+            config=app.config,
+            deployment_type=app.deployment_type,
+            type=app.type,
+            created_at=app.created_at.isoformat() if app.created_at else None,
+            updated_at=app.updated_at.isoformat() if app.updated_at else None,
+        )
+
+
+@app_router.get('/apps')
+@inject
+async def get_apps(
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    app_service: AppService = Depends(Provide[ApplicationContainer.app_service]),
+):
+    apps = await app_service.get_all_apps()
+    apps_data = [AppResponse.from_model(app).model_dump() for app in apps]
+
+    logger.info(f'Retrieved {len(apps)} apps successfully')
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse({'apps': apps_data}),
+    )
+
+
+@app_router.post('/apps')
+@inject
+async def create_app(
+    app_data: CreateAppRequest,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    app_service: AppService = Depends(Provide[ApplicationContainer.app_service]),
+):
+    try:
+        app = await app_service.get_app_by_name(app_data.app_name)
+        if app:
+            return JSONResponse(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                content=response_formatter.buildErrorResponse(
+                    'App with this name already exists'
+                ),
+            )
+        if app_data.deployment_type == AppDeploymentType.MANUAL:
+            if not app_data.app_secret or not app_data.app_key or not app_data.app_url:
+                return JSONResponse(
+                    status_code=status.HTTP_400_BAD_REQUEST,
+                    content=response_formatter.buildErrorResponse(
+                        'App secret, app key and app URL are required'
+                    ),
+                )
+            app_url = app_data.app_url
+        else:
+            app_url = f'https://{app_data.app_name}.apps.rootflo.ai'
+
+        data = {
+            'deployment': {
+                'action': 'apply',
+            },
+            'app': {
+                'name': app_data.app_name,
+            },
+        }
+
+        response = requests.post(build_trigger_url, json=data)
+
+        if response.status_code != 200:
+            logger.error(f'Failed to create app: {response.json()}')
+            return JSONResponse(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                content=response_formatter.buildErrorResponse(
+                    'Failed to create app'
+                ),
+            )
+        app_status = (
+            AppStatus.SUCCESS
+            if app_data.deployment_type == AppDeploymentType.MANUAL
+            else AppStatus.IN_PROGRESS
+        )
+
+        app = await app_service.create_app(
+            app_name=app_data.app_name,
+            app_url=app_url,
+            status=app_status,
+            app_secret=app_data.app_secret,
+            app_key=app_data.app_key,
+            deployment_type=app_data.deployment_type.value,
+            type=app_data.type,
+            config={},
+        )
+
+        logger.info(f'App {app_data.app_name} created successfully')
+
+        return JSONResponse(
+            status_code=status.HTTP_201_CREATED,
+            content=response_formatter.buildSuccessResponse(
+                {
+                    'app': AppResponse.from_model(app).model_dump(),
+                }
+            ),
+        )
+
+    except Exception as e:
+        logger.error(f'Failed to create app: {str(e)}')
+        return JSONResponse(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            content=response_formatter.buildErrorResponse(
+                f'Failed to create app: {str(e)}'
+            ),
+        )
+
+
+@app_router.get('/apps/{app_id}')
+@inject
+async def get_app(
+    app_id: UUID,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    app_service: AppService = Depends(Provide[ApplicationContainer.app_service]),
+):
+    app = await app_service.get_app_by_id(app_id)
+
+    if not app:
+        logger.error(f'App with ID {app_id} not found')
+        return JSONResponse(
+            status_code=status.HTTP_404_NOT_FOUND,
+            content=response_formatter.buildErrorResponse('App not found'),
+        )
+
+    logger.info(f'App {app.app_name} retrieved successfully')
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {'app': AppResponse.from_model(app).model_dump()}
+        ),
+    )
+
+
+@app_router.patch('/apps/{app_id}')
+@inject
+async def update_app(
+    app_id: UUID,
+    app_data: UpdateAppRequest,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    app_service: AppService = Depends(Provide[ApplicationContainer.app_service]),
+):
+    # Prepare update data, filtering out None values
+    update_data = {k: v for k, v in app_data.model_dump().items() if v is not None}
+    if not update_data:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse('No fields to update'),
+        )
+
+    try:
+        app = await app_service.update_app(app_id, **update_data)
+
+        if not app:
+            logger.error(f'App with ID {app_id} not found for update')
+            return JSONResponse(
+                status_code=status.HTTP_404_NOT_FOUND,
+                content=response_formatter.buildErrorResponse('App not found'),
+            )
+
+        logger.info(f'App {app.app_name} updated successfully')
+
+        return JSONResponse(
+            status_code=status.HTTP_200_OK,
+            content=response_formatter.buildSuccessResponse(
+                {'app': AppResponse.from_model(app).model_dump()}
+            ),
+        )
+    except Exception as e:
+        logger.error(f'Failed to update app: {str(e)}')
+        return JSONResponse(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            content=response_formatter.buildErrorResponse(
+                f'Failed to update app: {str(e)}'
+            ),
+        )
+
+
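+# A hypothetical call to the create endpoint above, sketched with httpx (the
+# base URL, key/secret values, and bearer token below are placeholders, not
+# real values; the router itself is mounted under /v1):
+#
+#   import httpx
+#
+#   httpx.post(
+#       '<base-url>/v1/apps',
+#       json={
+#           'app_name': 'demo',
+#           'app_url': 'https://demo.example.com',
+#           'app_key': '<app-key>',
+#           'app_secret': '<app-secret>',
+#           'deployment_type': 'manual',
+#       },
+#       headers={'Authorization': 'Bearer <console-token>'},
+#   )
+
+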
+@app_router.delete('/apps/{app_id}')
+@inject
+async def delete_app(
+    app_id: UUID,
+    request: Request,
+    delete_deployment: bool = Query(
+        True, description='Whether to delete the deployment'
+    ),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    app_service: AppService = Depends(Provide[ApplicationContainer.app_service]),
+    config: dict = Depends(Provide[ApplicationContainer.config]),
+    user_repository: SQLAlchemyRepository[User] = Depends(
+        Provide[ApplicationContainer.user_repository]
+    ),
+):
+    try:
+        session: UserSession = request.state.session
+        user_id = session.user_id
+        super_admin_emails = config['super_admin']['email'].split(',')
+
+        user = await user_repository.find_one(id=user_id)
+
+        if not user or user.email not in super_admin_emails:
+            return JSONResponse(
+                status_code=status.HTTP_403_FORBIDDEN,
+                content=response_formatter.buildErrorResponse(
+                    'You are not authorized to delete this app'
+                ),
+            )
+
+        app = await app_service.get_app_by_id(app_id)
+
+        if not app:
+            logger.error(f'App with ID {app_id} not found')
+            return JSONResponse(
+                status_code=status.HTTP_404_NOT_FOUND,
+                content=response_formatter.buildErrorResponse('App not found'),
+            )
+
+        app_name = app.app_name
+
+        if delete_deployment:
+            data = {
+                'deployment': {
+                    'action': 'destroy',
+                },
+                'app': {
+                    'name': app_name,
+                },
+            }
+
+            response = requests.post(build_trigger_url, json=data)
+
+            if response.status_code != 200:
+                return JSONResponse(
+                    status_code=status.HTTP_400_BAD_REQUEST,
+                    content=response_formatter.buildErrorResponse(
+                        'Failed to delete app'
+                    ),
+                )
+
+        deleted_app = await app_service.delete_app(app_id)
+
+        if not deleted_app:
+            logger.error(f'App with ID {app_id} not found for deletion')
+            return JSONResponse(
+                status_code=status.HTTP_404_NOT_FOUND,
+                content=response_formatter.buildErrorResponse('App not found'),
+            )
+
+        logger.info(f'App {app_name} deleted successfully')
+
+        return JSONResponse(
+            status_code=status.HTTP_200_OK,
+            content=response_formatter.buildSuccessResponse(
+                {'message': 'App deleted successfully'}
+            ),
+        )
+    except Exception as e:
+        logger.error(f'Failed to delete app: {str(e)}')
+        return JSONResponse(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            content=response_formatter.buildErrorResponse(
+                f'Failed to delete app: {str(e)}'
+            ),
+        )
+
+
+@app_router.get('/apps/{app_id}/status')
+@inject
+async def get_app_status(
+    app_id: UUID,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    app_service: AppService = Depends(Provide[ApplicationContainer.app_service]),
+):
+    app = await app_service.get_app_by_id(app_id)
+
+    if not app:
+        logger.error(f'App with ID {app_id} not found')
+        return JSONResponse(
+            status_code=status.HTTP_404_NOT_FOUND,
+            content=response_formatter.buildErrorResponse('App not found'),
+        )
+
+    url = f'https://{app.app_name}-floware.apps.rootflo.ai/floware'
+
+    response = requests.get(url + '/v1/health')
+
+    if response.status_code != 200:
+        return JSONResponse(
+            status_code=status.HTTP_200_OK,
+            content=response_formatter.buildSuccessResponse({'status': app.status}),
+        )
+
+    hmac_response = requests.post(
+        url + '/v1/developer/secrets', headers={'X-Passthrough': 'secret'}
+    )
+
+    if hmac_response.status_code != 201:
+        return JSONResponse(
+            status_code=status.HTTP_200_OK,
+            content=response_formatter.buildSuccessResponse({'status': app.status}),
+        )
+
+    # Parse the body only after the status check so a non-JSON error response
+    # cannot raise an unhandled exception here
+    res_json = hmac_response.json()
+
+    await app_service.update_app(
+        app_id,
+        
status=AppStatus.SUCCESS, + app_key=res_json['data']['client_key'], + app_secret=res_json['data']['client_secret'], + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'status': 'success'}), + ) diff --git a/wavefront/server/apps/floconsole/floconsole/controllers/auth_controller.py b/wavefront/server/apps/floconsole/floconsole/controllers/auth_controller.py new file mode 100644 index 00000000..e53d7613 --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/controllers/auth_controller.py @@ -0,0 +1,122 @@ +from uuid import uuid4 + +from common_module.common_container import CommonContainer +from common_module.log.logger import logger +from common_module.response_formatter import ResponseFormatter +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import APIRouter +from fastapi import Depends +from fastapi import Request +from fastapi import status +from fastapi.responses import JSONResponse +from pydantic import BaseModel + +from floconsole.utils.password_utils import verify_password +from floconsole.constants.auth import AUTH_ROLE_ID +from floconsole.db.models.user import User +from floconsole.db.models.session import Session +from floconsole.db.repositories.sql_alchemy_repository import SQLAlchemyRepository +from floconsole.di.application_container import ApplicationContainer +from floconsole.services.token_service import TokenService + +auth_router = APIRouter(prefix='/v1') + + +class AuthRequest(BaseModel): + email: str + password: str + + +@auth_router.get('/health') +def health_check(): + return {'status': 'ok'} + + +@auth_router.post('/authenticate') +@inject +async def authenticate( + request: Request, + auth_data: AuthRequest, + token_service: TokenService = Depends(Provide[ApplicationContainer.token_service]), + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[ApplicationContainer.user_repository] + ), + session_repository: SQLAlchemyRepository[Session] = Depends( + Provide[ApplicationContainer.session_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + user = await user_repository.find_one(email=auth_data.email) + if user is None or not verify_password(auth_data.password, user.password): + # Handle failed login attempt + logger.error(f'Authentication failed for email: {auth_data.email}') + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Incorrect username or password' + ), + ) + if user.deleted: + logger.error(f'Authentication attempt for disabled user: {auth_data.email}') + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse('User account is disabled'), + ) + + # Get device info from headers + device_info = request.headers.get('User-Agent') + + await session_repository.delete_all(user_id=user.id) + + # Create new session + session = await session_repository.create( + user_id=user.id, device_info=device_info, id=uuid4() + ) + + # Include session_id in token payload + token = token_service.create_token( + sub=user.email, + user_id=str(user.id), + role_id=AUTH_ROLE_ID, + payload={'session_id': str(session.id)}, + expiry=token_service.token_expiry, + ) + + response_data = {'access_token': token, 'token_type': 'bearer'} + + logger.info(f'User {auth_data.email} authenticated successfully') + + return JSONResponse( + status_code=status.HTTP_200_OK, + 
content=response_formatter.buildSuccessResponse({'user': response_data}), + ) + + +@auth_router.post('/logout') +@inject +async def logout( + request: Request, + session_repository: SQLAlchemyRepository[Session] = Depends( + Provide[ApplicationContainer.session_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + # Get the current session from request state + current_session = request.state.session + + # Delete the session from database + await session_repository.delete_all(id=current_session.session_id) + + logger.info(f'User logged out successfully - session: {current_session.session_id}') + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'message': 'Successfully logged out'} + ), + ) diff --git a/wavefront/server/apps/floconsole/floconsole/controllers/floware_proxy_controller.py b/wavefront/server/apps/floconsole/floconsole/controllers/floware_proxy_controller.py new file mode 100644 index 00000000..bd20a616 --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/controllers/floware_proxy_controller.py @@ -0,0 +1,171 @@ +from fastapi import APIRouter +from fastapi import Depends +from fastapi import Request +from fastapi import status +from fastapi.responses import JSONResponse +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide + +from common_module.log.logger import logger +from common_module.response_formatter import ResponseFormatter +from common_module.common_container import CommonContainer +from floconsole.di.application_container import ApplicationContainer +from floconsole.services.floware_proxy_service import FlowareProxyService + +floware_proxy_router = APIRouter(prefix='/v1') + + +@floware_proxy_router.get('/{app_id}/floware/{path:path}') +@inject +async def proxy_get_request( + app_id: str, + path: str, + request: Request, + floware_proxy_service: FlowareProxyService = Depends( + Provide[ApplicationContainer.floware_proxy_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """Proxy GET requests to floware service""" + try: + result = await floware_proxy_service.proxy_request( + method='GET', app_id=app_id, path=path, request=request + ) + logger.info(f'GET proxy request successful for app {app_id} path {path}') + return result + except Exception as e: + logger.error(f'GET proxy request failed for app {app_id} path {path}: {str(e)}') + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Proxy request failed: {str(e)}' + ), + ) + + +@floware_proxy_router.post('/{app_id}/floware/{path:path}') +@inject +async def proxy_post_request( + app_id: str, + path: str, + request: Request, + floware_proxy_service: FlowareProxyService = Depends( + Provide[ApplicationContainer.floware_proxy_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """Proxy POST requests to floware service""" + try: + result = await floware_proxy_service.proxy_request( + method='POST', app_id=app_id, path=path, request=request + ) + logger.info(f'POST proxy request successful for app {app_id} path {path}') + return result + except Exception as e: + logger.error( + f'POST proxy request failed for app {app_id} path {path}: {str(e)}' + ) + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + 
content=response_formatter.buildErrorResponse( + f'Proxy request failed: {str(e)}' + ), + ) + + +@floware_proxy_router.put('/{app_id}/floware/{path:path}') +@inject +async def proxy_put_request( + app_id: str, + path: str, + request: Request, + floware_proxy_service: FlowareProxyService = Depends( + Provide[ApplicationContainer.floware_proxy_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """Proxy PUT requests to floware service""" + try: + result = await floware_proxy_service.proxy_request( + method='PUT', app_id=app_id, path=path, request=request + ) + logger.info(f'PUT proxy request successful for app {app_id} path {path}') + return result + except Exception as e: + logger.error(f'PUT proxy request failed for app {app_id} path {path}: {str(e)}') + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Proxy request failed: {str(e)}' + ), + ) + + +@floware_proxy_router.patch('/{app_id}/floware/{path:path}') +@inject +async def proxy_patch_request( + app_id: str, + path: str, + request: Request, + floware_proxy_service: FlowareProxyService = Depends( + Provide[ApplicationContainer.floware_proxy_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """Proxy PATCH requests to floware service""" + try: + result = await floware_proxy_service.proxy_request( + method='PATCH', app_id=app_id, path=path, request=request + ) + logger.info(f'PATCH proxy request successful for app {app_id} path {path}') + return result + except Exception as e: + logger.error( + f'PATCH proxy request failed for app {app_id} path {path}: {str(e)}' + ) + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Proxy request failed: {str(e)}' + ), + ) + + +@floware_proxy_router.delete('/{app_id}/floware/{path:path}') +@inject +async def proxy_delete_request( + app_id: str, + path: str, + request: Request, + floware_proxy_service: FlowareProxyService = Depends( + Provide[ApplicationContainer.floware_proxy_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """Proxy DELETE requests to floware service""" + try: + result = await floware_proxy_service.proxy_request( + method='DELETE', app_id=app_id, path=path, request=request + ) + logger.info(f'DELETE proxy request successful for app {app_id} path {path}') + return result + except Exception as e: + logger.error( + f'DELETE proxy request failed for app {app_id} path {path}: {str(e)}' + ) + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Proxy request failed: {str(e)}' + ), + ) diff --git a/wavefront/server/apps/floconsole/floconsole/controllers/user_controller.py b/wavefront/server/apps/floconsole/floconsole/controllers/user_controller.py new file mode 100644 index 00000000..ac8ea137 --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/controllers/user_controller.py @@ -0,0 +1,97 @@ +from common_module.common_container import CommonContainer +from common_module.log.logger import logger +from common_module.response_formatter import ResponseFormatter +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import APIRouter +from fastapi import Depends +from fastapi import Request +from 
fastapi import status +from fastapi.responses import JSONResponse +from pydantic import BaseModel + +from floconsole.db.models.user import User +from floconsole.db.repositories.sql_alchemy_repository import SQLAlchemyRepository +from floconsole.di.application_container import ApplicationContainer +from floconsole.utils.user_utils import get_current_user +from floconsole.utils.password_utils import hash_password + + +user_router = APIRouter(prefix='/v1') + + +class CreateUserRequest(BaseModel): + email: str + password: str + first_name: str + last_name: str + + +@user_router.post('/users') +@inject +async def create_user( + user_data: CreateUserRequest, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[ApplicationContainer.user_repository] + ), +): + existing_user = await user_repository.find_one(email=user_data.email) + + if existing_user: + logger.warning( + f'User creation failed - email already exists: {user_data.email}' + ) + return JSONResponse( + status_code=status.HTTP_409_CONFLICT, + content=response_formatter.buildErrorResponse('Email already exists'), + ) + + hashed_password = hash_password(user_data.password) + + created_user = await user_repository.create( + email=user_data.email, + password=hashed_password, + first_name=user_data.first_name, + last_name=user_data.last_name, + ) + + logger.info(f'User created successfully: {created_user.email}') + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + {'user': created_user.to_dict()} + ), + ) + + +@user_router.get('/whoami') +@inject +async def get_resources( + request: Request, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[ApplicationContainer.user_repository] + ), +): + _, user_id, _ = get_current_user(request) + user = await user_repository.find_one(id=user_id) + + if not user: + logger.error(f'User not found for ID: {user_id}') + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse('User not found'), + ) + + logger.info(f'User {user.email} retrieved successfully') + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'user': user.to_dict()}), + ) diff --git a/wavefront/server/apps/floconsole/floconsole/db/__init__.py b/wavefront/server/apps/floconsole/floconsole/db/__init__.py new file mode 100644 index 00000000..b0c9de4f --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/db/__init__.py @@ -0,0 +1,16 @@ +# Database imports and utilities for FloConsole app +from .connection import DatabaseClient, DatabaseConfig +from .models.user import User +from .models.session import Session +from .models.app import App +from .repositories.sql_alchemy_repository import SQLAlchemyRepository + +# Export commonly used database components +__all__ = [ + 'DatabaseClient', + 'DatabaseConfig', + 'User', + 'Session', + 'App', + 'SQLAlchemyRepository', +] diff --git a/wavefront/server/apps/floconsole/floconsole/db/alembic.ini b/wavefront/server/apps/floconsole/floconsole/db/alembic.ini new file mode 100644 index 00000000..ba143822 --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/db/alembic.ini @@ -0,0 +1,82 @@ +# A generic, single database configuration. 
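+#
+# Typical usage (a sketch; assumes the CONSOLE_DB_* environment variables read
+# by alembic/env.py are exported):
+#
+#   cd wavefront/server/apps/floconsole/floconsole/db
+#   alembic upgrade head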
+ +[alembic] +# path to migration scripts +# Use forward slashes (/) also on windows to provide an os agnostic path +script_location = %(here)s/alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. +# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +# version_path_separator = newline +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = %(db_url)s + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = %(here)s/.venv/bin/ruff
+# ruff.options = --fix REVISION_SCRIPT_FILENAME
\ No newline at end of file
diff --git a/wavefront/server/apps/floconsole/floconsole/db/alembic/env.py b/wavefront/server/apps/floconsole/floconsole/db/alembic/env.py
new file mode 100644
index 00000000..38d7ee90
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/alembic/env.py
@@ -0,0 +1,87 @@
+import os
+
+from alembic import context
+from dotenv import load_dotenv
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from floconsole.db.base import Base
+from floconsole.db.models.user import User
+from floconsole.db.models.session import Session
+from floconsole.db.models.app import App
+
+# Load environment variables
+load_dotenv()
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Import all models here to ensure they're registered with Base.metadata
+models = [
+    User,
+    Session,
+    App,
+]
+
+target_metadata = Base.metadata
+
+# Get database URL from environment variables
+db_user_name = os.getenv('CONSOLE_DB_USERNAME')
+db_password = os.getenv('CONSOLE_DB_PASSWORD')
+db_host = os.getenv('CONSOLE_DB_HOST')
+db_port = os.getenv('CONSOLE_DB_PORT')
+db_name = os.getenv('CONSOLE_DB_NAME')
+
+db_url = f'postgresql://{db_user_name}:{db_password}@{db_host}:{db_port}/{db_name}'
+
+config.set_main_option('sqlalchemy.url', db_url)
+
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+
+    url = config.get_main_option('sqlalchemy.url')
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={'paramstyle': 'named'},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
diff --git a/wavefront/server/apps/floconsole/floconsole/db/alembic/script.py.mako b/wavefront/server/apps/floconsole/floconsole/db/alembic/script.py.mako
new file mode 100644
index 00000000..37d0cac3
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/alembic/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+    ${downgrades if downgrades else "pass"}
\ No newline at end of file
diff --git a/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_18_1543-73bcd253dd62_create_user_table.py b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_18_1543-73bcd253dd62_create_user_table.py
new file mode 100644
index 00000000..8776fbea
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_18_1543-73bcd253dd62_create_user_table.py
@@ -0,0 +1,40 @@
+"""create user table
+
+Revision ID: 73bcd253dd62
+Revises:
+Create Date: 2025-08-18 15:43:00.525886
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '73bcd253dd62'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    op.create_table(
+        'user',
+        sa.Column('id', sa.Uuid(), nullable=False),
+        sa.Column('email', sa.String(), nullable=False),
+        sa.Column('password', sa.String(), nullable=False),
+        sa.Column('first_name', sa.String(), nullable=False),
+        sa.Column('last_name', sa.String(), nullable=False),
+        sa.Column('deleted', sa.Boolean(), nullable=False, server_default='false'),
+        sa.Column('failed_attempts', sa.Integer(), nullable=False, server_default='0'),
+        sa.Column('locked_until', sa.DateTime(), nullable=True),
+        sa.Column('last_failed_attempt', sa.DateTime(), nullable=True),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('email'),
+    )
+    op.create_index(op.f('ix_user_id'), 'user', ['id'], unique=False)
+
+
+def downgrade() -> None:
+    op.drop_index(op.f('ix_user_id'), table_name='user')
+    op.drop_table('user')
diff --git a/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_18_1543-e3a2fa91cda2_create_session_table.py b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_18_1543-e3a2fa91cda2_create_session_table.py
new file mode 100644
index 00000000..a63e0756
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_18_1543-e3a2fa91cda2_create_session_table.py
@@ -0,0 +1,40 @@
+"""create session table
+
+Revision ID: e3a2fa91cda2
+Revises: 73bcd253dd62
+Create Date: 2025-08-18 15:43:35.644982
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'e3a2fa91cda2'
+down_revision = '73bcd253dd62'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    op.create_table(
+        'user_session',
+        sa.Column('id', sa.Uuid(), nullable=False),
+        sa.Column('user_id', sa.Uuid(), nullable=False),
+        sa.Column('device_info', sa.String(), nullable=True),
+        sa.Column('created_at', sa.DateTime(), nullable=False),
+        sa.Column('updated_at', sa.DateTime(), nullable=False),
+        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'),
+        sa.PrimaryKeyConstraint('id'),
+    )
+    op.create_index(op.f('ix_user_session_id'), 'user_session', ['id'], unique=False)
+    op.create_index(
+        op.f('ix_user_session_user_id'), 'user_session', ['user_id'], unique=False
+    )
+
+
+def downgrade() -> None:
+    op.drop_index(op.f('ix_user_session_user_id'), table_name='user_session')
+    op.drop_index(op.f('ix_user_session_id'), table_name='user_session')
+    op.drop_table('user_session')
diff --git a/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_18_1554-521ae4960bcf_add_seed_user.py b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_18_1554-521ae4960bcf_add_seed_user.py
new file mode 100644
index 00000000..16093afd
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_18_1554-521ae4960bcf_add_seed_user.py
@@ -0,0 +1,73 @@
+"""add seed user
+
+Revision ID: 521ae4960bcf
+Revises: e3a2fa91cda2
+Create Date: 2025-08-18 15:54:31.167317
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+import uuid
+import os
+
+from floconsole.utils.password_utils import hash_password
+
+
+# revision identifiers, used by Alembic.
+revision = '521ae4960bcf'
+down_revision = 'e3a2fa91cda2'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # Get user details from environment variables
+    email = os.getenv('CONSOLE_EMAIL')
+    password = os.getenv('CONSOLE_PASSWORD')
+    f_name = os.getenv('CONSOLE_FIRST_NAME')
+    l_name = os.getenv('CONSOLE_LAST_NAME')
+
+    # Skip if environment variables are not set
+    if not all([email, password, f_name, l_name]):
+        print(
+            'Skipping seed user creation - missing environment variables (CONSOLE_EMAIL, CONSOLE_PASSWORD, CONSOLE_FIRST_NAME, CONSOLE_LAST_NAME)'
+        )
+        return
+
+    assert email is not None
+    assert password is not None
+    assert f_name is not None
+    assert l_name is not None
+
+    hashed_password = hash_password(password)
+
+    # Insert seed user using connection
+    conn = op.get_bind()
+    conn.execute(
+        sa.text("""
+        INSERT INTO "user" (id, email, password, first_name, last_name, deleted, failed_attempts, locked_until, last_failed_attempt)
+        VALUES (:id, :email, :password, :first_name, :last_name, :deleted, :failed_attempts, :locked_until, :last_failed_attempt)
+        """),
+        {
+            'id': uuid.uuid4(),
+            'email': email,
+            'password': hashed_password,  # already hashed via hash_password above
+            'first_name': f_name,
+            'last_name': l_name,
+            'deleted': False,
+            'failed_attempts': 0,
+            'locked_until': None,
+            'last_failed_attempt': None,
+        },
+    )
+
+
+def downgrade() -> None:
+    # Get email from environment variable
+    email = os.getenv('CONSOLE_EMAIL')
+    if email:
+        conn = op.get_bind()
+        conn.execute(
+            sa.text('DELETE FROM "user" WHERE email = :email'), {'email': email}
+        )
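The seed migration above is driven entirely by environment variables and skips itself when any are missing. A minimal sketch of preparing the environment before running the upgrade (illustrative, not part of the patch; all values are placeholders):

    import os

    # Placeholder credentials; the migration no-ops if any of these are unset.
    os.environ.update({
        'CONSOLE_EMAIL': 'admin@example.com',
        'CONSOLE_PASSWORD': 'change-me',
        'CONSOLE_FIRST_NAME': 'Admin',
        'CONSOLE_LAST_NAME': 'User',
    })
    # then: alembic upgrade head (see the programmatic sketch above)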
diff --git a/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_21_1251-ac10dc573599_create_app_table.py b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_21_1251-ac10dc573599_create_app_table.py
new file mode 100644
index 00000000..e1024039
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_08_21_1251-ac10dc573599_create_app_table.py
@@ -0,0 +1,42 @@
+"""create app table
+
+Revision ID: ac10dc573599
+Revises: 521ae4960bcf
+Create Date: 2025-08-21 12:51:55.818597
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'ac10dc573599'
+down_revision = '521ae4960bcf'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        'app',
+        sa.Column('id', sa.Uuid(), nullable=False),
+        sa.Column('app_name', sa.String(), nullable=False),
+        sa.Column('app_url', sa.String(), nullable=False),
+        sa.Column('app_secret', sa.String(), nullable=False),
+        sa.Column('app_key', sa.String(), nullable=False),
+        sa.Column('deleted', sa.Boolean(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), nullable=False),
+        sa.Column('updated_at', sa.DateTime(), nullable=True),
+        sa.PrimaryKeyConstraint('id'),
+    )
+    op.create_index(op.f('ix_app_id'), 'app', ['id'], unique=False)
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index(op.f('ix_app_id'), table_name='app')
+    op.drop_table('app')
+    # ### end Alembic commands ###
diff --git a/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_10_14_1514-e9a4691e0732_update_apps_table.py b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_10_14_1514-e9a4691e0732_update_apps_table.py
new file mode 100644
index 00000000..b2996860
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_10_14_1514-e9a4691e0732_update_apps_table.py
@@ -0,0 +1,41 @@
+"""Update apps table
+
+Revision ID: 49e97617960c
+Revises: ac10dc573599
+Create Date: 2025-10-14 14:23:30.306132
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '49e97617960c'
+down_revision = 'ac10dc573599'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    op.add_column(
+        'app',
+        sa.Column('status', sa.String(), nullable=False, server_default='success'),
+    )
+    op.add_column(
+        'app', sa.Column('config', sa.JSON(), nullable=False, server_default='{}')
+    )
+
+    op.alter_column('app', 'app_secret', nullable=True)
+    op.alter_column('app', 'app_key', nullable=True)
+
+    op.alter_column('app', 'status', server_default=None)
+    op.alter_column('app', 'config', server_default=None)
+
+
+def downgrade() -> None:
+    op.alter_column('app', 'app_secret', nullable=False)
+    op.alter_column('app', 'app_key', nullable=False)
+
+    op.drop_column('app', 'config')
+    op.drop_column('app', 'status')
diff --git a/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_11_24_1228-480783ba0ace_adding_type_column_to_app_table.py b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_11_24_1228-480783ba0ace_adding_type_column_to_app_table.py
new file mode 100644
index 00000000..a05909fc
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_11_24_1228-480783ba0ace_adding_type_column_to_app_table.py
@@ -0,0 +1,40 @@
+"""Adding type column to app table
+
+Revision ID: 480783ba0ace
+Revises: 49e97617960c
+Create Date: 2025-11-24 12:28:36.127318
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '480783ba0ace'
+down_revision = '49e97617960c'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    op.add_column(
+        'app',
+        sa.Column(
+            'deployment_type',
+            sa.String(length=255),
+            nullable=False,
+            server_default='manual',
+        ),
+    )
+    op.add_column(
+        'app',
+        sa.Column(
+            'type', sa.String(length=255), nullable=False, server_default='custom'
+        ),
+    )
+
+
+def downgrade() -> None:
+    op.drop_column('app', 'deployment_type')
+    op.drop_column('app', 'type')
diff --git a/wavefront/server/apps/floconsole/floconsole/db/base.py b/wavefront/server/apps/floconsole/floconsole/db/base.py
new file mode 100644
index 00000000..59be7030
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/base.py
@@ -0,0 +1,3 @@
+from sqlalchemy.orm import declarative_base
+
+Base = declarative_base()
diff --git a/wavefront/server/apps/floconsole/floconsole/db/connection.py b/wavefront/server/apps/floconsole/floconsole/db/connection.py
new file mode 100644
index 00000000..1977473d
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/connection.py
@@ -0,0 +1,45 @@
+from dataclasses import dataclass
+import os
+
+from alembic import command
+from alembic.config import Config
+from sqlalchemy.ext.asyncio import async_sessionmaker
+from sqlalchemy.ext.asyncio import create_async_engine
+
+
+@dataclass
+class DatabaseConfig:
+    username: str
+    password: str
+    host: str
+    port: str
+    db_name: str
+
+
+class DatabaseClient:
+    def __init__(self, db_config: DatabaseConfig) -> None:
+        self.db_config = db_config
+        self._engine = create_async_engine(
+            f'postgresql+psycopg://{db_config.username}:{db_config.password}@{db_config.host}:{db_config.port}/{db_config.db_name}'
+        )
+        self.session = async_sessionmaker(autocommit=False, bind=self._engine)
+
+    async def close(self):
+        if self._engine is None:
+            raise Exception('DatabaseClient is not initialized')
+        await self._engine.dispose()
+
+        self._engine = None
+        self.session = None
+
+    async def connect(self) -> None:
+        if self._engine is None:
+            # logger.error('Error database connection ..')
+            raise Exception('DatabaseClient is not initialized')
+
+    def run_migration(self):
+        current_script_dir = os.path.dirname(os.path.abspath(__file__))
+        alembic = os.path.join(current_script_dir, 'alembic.ini')
+        absolute_file_path = os.path.abspath(alembic)
+        alembic_config = Config(absolute_file_path)
+        command.upgrade(alembic_config, 'head')
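A usage sketch for DatabaseClient (illustrative, not part of the patch; credentials are placeholders):

    import asyncio

    from floconsole.db.connection import DatabaseClient, DatabaseConfig


    async def main():
        client = DatabaseClient(
            DatabaseConfig('user', 'secret', 'localhost', '5432', 'floconsole')
        )
        client.run_migration()  # synchronous: applies `alembic upgrade head`
        async with client.session() as session:
            ...  # issue queries through the AsyncSession here
        await client.close()


    asyncio.run(main())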
diff --git a/wavefront/server/apps/floconsole/floconsole/db/models/__init__.py b/wavefront/server/apps/floconsole/floconsole/db/models/__init__.py
new file mode 100644
index 00000000..49876ce7
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/models/__init__.py
@@ -0,0 +1,7 @@
+# Models package for FloConsole app
+
+from .user import User
+from .session import Session
+from .app import App
+
+__all__ = ['User', 'Session', 'App']
diff --git a/wavefront/server/apps/floconsole/floconsole/db/models/app.py b/wavefront/server/apps/floconsole/floconsole/db/models/app.py
new file mode 100644
index 00000000..e0b69f75
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/models/app.py
@@ -0,0 +1,42 @@
+import uuid
+from datetime import datetime
+from typing import Optional
+
+from sqlalchemy import JSON
+from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
+
+from ..base import Base
+
+
+class App(Base):
+    __tablename__ = 'app'
+
+    id: Mapped[uuid.UUID] = mapped_column(
+        primary_key=True, default=uuid.uuid4, index=True
+    )
+    app_name: Mapped[str] = mapped_column(nullable=False)
+    app_url: Mapped[str] = mapped_column(nullable=False)
+    app_secret: Mapped[str] = mapped_column(nullable=True)
+    app_key: Mapped[str] = mapped_column(nullable=True)
+    deleted: Mapped[bool] = mapped_column(default=False)
+    status: Mapped[str] = mapped_column(default='in_progress')
+    config: Mapped[dict] = mapped_column(JSON, default=dict)  # callable default: fresh dict per row
+    deployment_type: Mapped[str] = mapped_column(nullable=False, default='manual')
+    type: Mapped[str] = mapped_column(nullable=False, default='custom')
+    created_at: Mapped[datetime] = mapped_column(default=datetime.now)
+    updated_at: Mapped[Optional[datetime]] = mapped_column(nullable=True)
+
+    def to_dict(self):
+        return {
+            'id': str(self.id),
+            'app_name': self.app_name,
+            'app_url': self.app_url,
+            'app_key': self.app_key,
+            'status': self.status,
+            'config': self.config,
+            'deployment_type': self.deployment_type,
+            'type': self.type,
+            'created_at': self.created_at.isoformat() if self.created_at else None,
+            'updated_at': self.updated_at.isoformat() if self.updated_at else None,
+        }
diff --git a/wavefront/server/apps/floconsole/floconsole/db/models/session.py b/wavefront/server/apps/floconsole/floconsole/db/models/session.py
new file mode 100644
index 00000000..921098a4
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/models/session.py
@@ -0,0 +1,28 @@
+from datetime import datetime
+import uuid
+
+from sqlalchemy import ForeignKey
+from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
+from sqlalchemy.orm import relationship
+
+from ..base import Base
+
+
+class Session(Base):
+    __tablename__ = 'user_session'
+
+    id: Mapped[uuid.UUID] = mapped_column(
+        primary_key=True, default=uuid.uuid4, index=True
+    )
+    user_id: Mapped[uuid.UUID] = mapped_column(
+        ForeignKey('user.id', ondelete='CASCADE'), nullable=False, index=True
+    )
+    device_info: Mapped[str] = mapped_column(nullable=True)
+    created_at: Mapped[datetime] = mapped_column(default=datetime.now)
+    updated_at: Mapped[datetime] = mapped_column(
+        default=datetime.now, onupdate=datetime.now
+    )
+
+    # Relationship
+    user = relationship('User', back_populates='sessions')
diff --git a/wavefront/server/apps/floconsole/floconsole/db/models/user.py b/wavefront/server/apps/floconsole/floconsole/db/models/user.py
new file mode 100644
index 00000000..5fff0255
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/models/user.py
@@ -0,0 +1,40 @@
+import uuid
+from datetime import datetime
+from typing import Optional
+
+from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
+from sqlalchemy.orm import relationship
+
+from ..base import Base
+
+
+class User(Base):
+    __tablename__ = 'user'
+
+    id: Mapped[uuid.UUID] = mapped_column(
+        primary_key=True, default=uuid.uuid4, index=True
+    )
+    email: Mapped[str] = mapped_column(nullable=False, unique=True)
+    password: Mapped[str] = mapped_column(nullable=False)
+    first_name: Mapped[str] = mapped_column(nullable=False)
+    last_name: Mapped[str] = mapped_column(nullable=False)
+    deleted: Mapped[bool] = mapped_column(default=False)
+
+    # Account lockout fields
+    failed_attempts: Mapped[int] = mapped_column(default=0, nullable=False)
+    locked_until: Mapped[Optional[datetime]] = mapped_column(nullable=True)
+    last_failed_attempt: Mapped[Optional[datetime]] = mapped_column(nullable=True)
+
+    # Add relationship for sessions
+    sessions = relationship(
+        'Session', back_populates='user', cascade='all, delete-orphan'
+    )
+
+    def to_dict(self):
+        return {
+            'id': str(self.id),
+            'email': self.email,
+            'first_name': self.first_name,
+            'last_name': self.last_name,
+        }
diff --git a/wavefront/server/apps/floconsole/floconsole/db/repositories/__init__.py b/wavefront/server/apps/floconsole/floconsole/db/repositories/__init__.py
new file mode 100644
index 00000000..40270774
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/repositories/__init__.py
@@ -0,0 +1 @@
+# Repository package for FloConsole app
diff --git a/wavefront/server/apps/floconsole/floconsole/db/repositories/sql_alchemy_repository.py b/wavefront/server/apps/floconsole/floconsole/db/repositories/sql_alchemy_repository.py
new file mode 100644
index 00000000..2909bd3f
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/db/repositories/sql_alchemy_repository.py
@@ -0,0 +1,234 @@
+from typing import Any, Generic, Type, TypeVar
+
+from sqlalchemy import delete
+from sqlalchemy import func
+from sqlalchemy import insert
+from sqlalchemy import select
+from sqlalchemy import update
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.ext.asyncio import async_sessionmaker
+from sqlalchemy.sql import text
+
+from ..base import Base
+from ..connection import DatabaseClient
+
+T = TypeVar('T', bound=Base)  # type: ignore
+
+
+class SQLAlchemyRepository(Generic[T]):
+    def __init__(self, model: Type[T], db_client: DatabaseClient):
+        """
+        Initialize the repository with a specific model.
+
+        :param model: The SQLAlchemy model class (subclass of Base).
+        """
+        self.model: Type[T] = model
+        self.session: async_sessionmaker[AsyncSession] = db_client.session
+
+    async def create(self, **kwargs) -> T:
+        """
+        Create a new record in the database.
+
+        :param kwargs: The fields and their values to create the record.
+        :return: The created instance of the model.
+        """
+        async with self.session() as session:
+            session: AsyncSession
+            instance = self.model(**kwargs)
+            session.add(instance)
+            await session.commit()
+            await session.refresh(instance)
+
+            return instance
+
+    async def create_all(
+        self,
+        records: list[T],
+        replace: bool = False,
+        session: AsyncSession | None = None,
+    ):
+        """
+        Create new records in the database.
+
+        :param records: List of records
+        :param replace: Replace a record if it already exists. Default: False
+        :param session: Optional session for transaction management
+        :return: The created instances of the model.
+        """
+        model_instances = list(records)
+
+        if session:
+            for instance in model_instances:
+                if replace:
+                    await session.merge(instance)
+                else:
+                    session.add(instance)
+            return records
+        else:
+            async with self.session() as session:
+                session: AsyncSession
+                for instance in model_instances:
+                    if replace:
+                        await session.merge(instance)
+                    else:
+                        session.add(instance)
+                await session.commit()
+                return records
+
+    async def find(self, limit: int = 100, **filters) -> list[T]:
+        """
+        Find all records in the database matching the given filters.
+
+        :param limit: Maximum number of records to return.
+        :param filters: The filters to apply to the query.
+        :return: A list of matching model instances.
+        """
+        if 'session' in filters and isinstance(filters['session'], AsyncSession):
+            session = filters['session']
+            del filters['session']
+            query = select(self.model)
+            for key, value in filters.items():
+                if isinstance(value, list):
+                    query = query.where(getattr(self.model, key).in_(value))
+                else:
+                    query = query.where(getattr(self.model, key) == value)
+            query = query.limit(limit)
+            result = await session.scalars(query)
+            return list(result.all())
+
+        async with self.session() as session:
+            session: AsyncSession
+            query = select(self.model)
+            for key, value in filters.items():
+                if isinstance(value, list):
+                    query = query.where(getattr(self.model, key).in_(value))
+                else:
+                    query = query.where(getattr(self.model, key) == value)
+            query = query.limit(limit)
+            result = await session.scalars(query)
+            return list(result.all())
+
+    async def find_one(self, **filters) -> T | None:
+        """
+        Find the first record in the database matching the given filters.
+
+        :param filters: The filters to apply to the query.
+        :return: The first matching model instance, or None if no match is found.
+        """
+        async with self.session() as session:
+            session: AsyncSession
+            query = select(self.model)
+            for key, value in filters.items():
+                query = query.where(getattr(self.model, key) == value)
+            return await session.scalar(query)
+
+    async def find_one_and_update(
+        self, filters: dict[str, Any], refresh: bool = False, **update_data
+    ) -> T | None:
+        """
+        Find the first record in the database matching the given filters, and update it with the provided data.
+
+        :param filters: The filters to apply to the query.
+        :param update_data: The data to update the record with.
+        :return: The updated model instance, or None if no match is found.
+        """
+        async with self.session() as session:
+            session: AsyncSession
+            query = select(self.model)
+            for key, value in filters.items():
+                query = query.where(getattr(self.model, key) == value)
+            instance = await session.scalar(query)
+            if instance:
+                for key, value in update_data.items():
+                    setattr(instance, key, value)
+                await session.commit()
+                if refresh:
+                    await session.refresh(
+                        instance
+                    )  # Refresh to ensure object is properly attached
+                return instance
+            else:
+                return None
+
+    async def delete_all(self, **filters) -> None:
+        """
+        Delete all records in the database matching the given filters.
+
+        :param filters: The filters to apply to the query.
+        """
+        async with self.session() as session:
+            session: AsyncSession
+            query = delete(self.model)
+            for key, value in filters.items():
+                query = query.where(getattr(self.model, key) == value)
+            await session.execute(query)
+            await session.commit()
+
+    async def check_empty(self) -> bool:
+        """
+        Check if the database table is empty.
+
+        :return: True if the table is empty, False otherwise.
+        """
+        async with self.session() as session:
+            session: AsyncSession
+            count = await session.scalar(select(func.count()).select_from(self.model))
+            return count == 0
+
+    async def count(self, **filters) -> int:
+        """
+        Count the rows in the table that match the given filters.
+
+        :return: The count after applying the filters.
+        """
+        async with self.session() as session:
+            session: AsyncSession
+            query = select(func.count()).select_from(self.model)
+            for key, value in filters.items():
+                query = query.where(getattr(self.model, key) == value)
+            count = await session.scalar(query)
+            return int(count or 0)
+
+    async def execute_query(self, query: str, params: dict | None = None, model_class=None) -> list:
+        """
+        Execute a raw SQL query asynchronously and return the results.
+
+        :param query: The raw SQL string.
+        :param params: Optional bind parameters for the query.
+        :return: A list of matching records.
+        """
+        async with self.session() as session:
+            session: AsyncSession
+            result = await session.execute(text(query), params or {})
+            columns = result.keys()
+            rows = [dict(zip(columns, row)) for row in result.all()]
+            if model_class:
+                return [model_class(**row) for row in rows]
+            return rows
+
+    async def upsert(self, filters: dict[str, Any], **update_values):
+        """
+        Find the first record in the database matching the given filters.
+        If the record exists, update it with the given values; otherwise
+        insert a new record built from the filters plus update_values.
+        """
+        async with self.session() as session:
+            session: AsyncSession
+            query = select(self.model).filter_by(**filters)
+            result = await session.execute(query)
+            existing = result.scalar_one_or_none()
+            if existing:
+                stmt = (
+                    update(self.model)
+                    .where(
+                        *(
+                            getattr(self.model, key) == val
+                            for key, val in filters.items()
+                        )
+                    )
+                    .values(**update_values)
+                )
+                await session.execute(stmt)
+            else:
+                stmt = insert(self.model).values({**filters, **update_values})
+                await session.execute(stmt)
+            await session.commit()
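A usage sketch for the generic repository, bound to the App model (illustrative, not part of the patch):

    from floconsole.db.models.app import App
    from floconsole.db.repositories.sql_alchemy_repository import SQLAlchemyRepository


    async def demo(db_client):
        repo = SQLAlchemyRepository[App](model=App, db_client=db_client)
        created = await repo.create(app_name='demo', app_url='https://demo.example')
        listed = await repo.find(deleted=False, limit=10)   # filtered list
        one = await repo.find_one(app_name='demo')          # first match or None
        await repo.find_one_and_update({'id': created.id}, status='success')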
diff --git a/wavefront/server/apps/floconsole/floconsole/di/application_container.py b/wavefront/server/apps/floconsole/floconsole/di/application_container.py
new file mode 100644
index 00000000..1005a04f
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/di/application_container.py
@@ -0,0 +1,82 @@
+from dependency_injector import containers
+from dependency_injector import providers
+
+from flo_cloud.kms import FloKmsService
+from floconsole.db import (
+    DatabaseClient,
+    DatabaseConfig,
+    User,
+    Session,
+    App,
+    SQLAlchemyRepository,
+)
+from floconsole.services.token_service import TokenService
+from floconsole.services.floware_proxy_service import FlowareProxyService
+from floconsole.services.app_service import AppService
+
+
+class ApplicationContainer(containers.DeclarativeContainer):
+    config = providers.Configuration(ini_files=['./config.ini'])
+
+    # Common module container (external dependency)
+    common_container = providers.Dependency()
+
+    # Database configuration and client
+    db_config = providers.Factory(
+        DatabaseConfig,
+        username=config.database.username,
+        password=config.database.password,
+        host=config.database.host,
+        port=config.database.port,
+        db_name=config.database.db_name,
+    )
+
+    db_client = providers.Singleton(DatabaseClient, db_config=db_config)
+
+    # Repositories using generic SQLAlchemyRepository
+    user_repository = providers.Singleton(
+        SQLAlchemyRepository[User], model=User, db_client=db_client
+    )
+
+    session_repository = providers.Singleton(
+        SQLAlchemyRepository[Session], model=Session, db_client=db_client
+    )
+
+    app_repository = providers.Singleton(
+        SQLAlchemyRepository[App], model=App, db_client=db_client
+    )
+
+    # services
+    app_service = providers.Singleton(AppService, app_repository=app_repository)
+
+    kms_service = providers.Selector(
+        config.jwt_token.enable_cloud_kms,
+        true=providers.Singleton(
+            FloKmsService, cloud_provider=config.cloud_config.cloud_provider
+        ),
+        false=providers.Object(None),  # No KMS service if cloud KMS is not enabled
+    )
+
+    token_service = providers.Singleton(
+        TokenService,
+        private_key=config.jwt_token.private_key,
+        public_key=config.jwt_token.public_key,
+        kms_service=kms_service,
+        token_expiry=config.jwt_token.token_expiry,
+        temporary_token_expiry=config.jwt_token.temporary_token_expiry,
+        app_env=config.env_config.app_env,
+        token_prefix=config.jwt_token.token_prefix,
+        issuer=config.jwt_token.issuer,
+        audience=config.jwt_token.audience,
+    )
+
+    # Floware proxy service
+    floware_proxy_service = providers.Singleton(
+        FlowareProxyService,
+        token_service=token_service,
+        app_service=app_service,
+        service_issuer=config.jwt_token.issuer,
+        app_env=config.env_config.app_env,
+        token_prefix=config.jwt_token.token_prefix,
+        temporary_token_expiry=config.jwt_token.temporary_token_expiry,
+    )
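Because everything is exposed as a provider, tests can swap heavy dependencies without touching the services; a sketch (illustrative, not part of the patch; assumes config.ini is resolvable from the working directory):

    from unittest import mock

    from floconsole.di.application_container import ApplicationContainer

    container = ApplicationContainer(common_container=mock.Mock())
    with container.db_client.override(mock.Mock()):
        app_service = container.app_service()  # built on the overridden client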
diff --git a/wavefront/server/apps/floconsole/floconsole/server.py b/wavefront/server/apps/floconsole/floconsole/server.py
new file mode 100644
index 00000000..14098691
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/server.py
@@ -0,0 +1,170 @@
+from contextlib import asynccontextmanager
+import glob
+import os
+from typing import Any, cast
+
+from common_module.common_container import CommonContainer
+from common_module.log.logger import logger
+from common_module.response_formatter import ResponseFormatter
+from common_module.middleware.request_id_middleware import RequestIdMiddleware
+from dotenv import load_dotenv
+from fastapi import FastAPI, HTTPException, Request
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import JSONResponse
+import uvicorn
+from starlette.middleware import _MiddlewareFactory
+
+from floconsole.authorization.require_auth import RequireAuthMiddleware
+from floconsole.di.application_container import ApplicationContainer
+from floconsole.controllers.app_controller import app_router
+from floconsole.controllers.auth_controller import auth_router
+from floconsole.controllers.floware_proxy_controller import floware_proxy_router
+from floconsole.controllers.user_controller import user_router
+from floconsole.db import DatabaseClient
+
+load_dotenv()
+
+# Initialize containers
+common_container = CommonContainer(cache_manager=None)
+application_container = ApplicationContainer(common_container=common_container)
+
+# Wire containers
+application_container.wire(
+    modules=[__name__],
+    packages=[
+        'floconsole.controllers',
+    ],
+)
+
+common_container.wire(
+    modules=[__name__],
+    packages=[
+        'floconsole.controllers',
+    ],
+)
+
+
+def _middleware(cls: type[Any]) -> _MiddlewareFactory[Any]:
+    return cast(_MiddlewareFactory[Any], cls)
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    # Startup code
+    logger.info('Starting FloConsole application...')
+
+    # Initialize database connection
+    db_client: DatabaseClient = application_container.db_client()
+
+    if isinstance(db_client, DatabaseClient):
+        await db_client.connect()
+    else:
+        raise TypeError('db_client is not an instance of DatabaseClient')
+
+    # Run database migrations
+    try:
+        db_client.run_migration()
+        logger.info('Database migrations completed successfully')
+    except Exception as e:
+        logger.error(f'Database migration failed: {e}')
+        raise
+
+    yield
+
+    # Shutdown code
+    logger.info('Shutting down FloConsole application...')
+
+    # Close database connection
+    try:
+        await db_client.close()
+        logger.info('Database connection closed')
+    except Exception as e:
+        logger.error(f'Error closing database connection: {e}')
+
+
+app = FastAPI(
+    title='FloConsole API',
+    description='Console application for RootFlo platform',
+    version='1.0.0',
+    lifespan=lifespan,
+)
+
+origins = os.getenv('ALLOWED_ORIGINS', 'http://localhost:5173')
+allowed_origins = origins.split(',')
+
+app.add_middleware(_middleware(RequestIdMiddleware))
+app.add_middleware(_middleware(RequireAuthMiddleware))
+
+# Configure CORS with proper security settings
+app.add_middleware(
+    _middleware(CORSMiddleware),
+    allow_origins=allowed_origins,
+    allow_credentials=True,
+    allow_methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'],
+    allow_headers=['*'],
+    expose_headers=[
+        'X-Content-Type-Options',
+        'X-XSS-Protection',
+        'X-Frame-Options',
+        'Referrer-Policy',
+        'Content-Security-Policy',
+        'Pragma',
+        'Expires',
+        'Strict-Transport-Security',
+        'Cache-Control',
+    ],
+)
+
+# Include routers
+app.include_router(auth_router, prefix='/floconsole')
+app.include_router(floware_proxy_router, prefix='/floconsole')
+app.include_router(user_router, prefix='/floconsole')
+app.include_router(app_router, prefix='/floconsole')
+
+
+@app.exception_handler(Exception)
+async def global_exception_handler(request: Request, exc: Exception):
+    # Skip HTTPExceptions (they're handled by FastAPI)
+    if isinstance(exc, HTTPException):
+        raise exc
+
+    error_message = 'An unexpected error has occurred while performing this action, please try again'
+    if environment != 'production':
+        error_message += f' - {str(exc)}'
+
+    logger.error(f'Error in API call: {exc}', exc_info=True)
+
+    exception_response_formatter = ResponseFormatter()
+    return JSONResponse(
+        status_code=500,
+        content=exception_response_formatter.buildErrorResponse(error=error_message),
+    )
+
+
+environment = os.getenv('APP_ENV', 'dev')
+
+# Running with Uvicorn (for local development)
+if __name__ == '__main__':
+    print(f'Starting application in environment: {environment}')
+    if environment == 'production':
+        uvicorn.run(
+            'server:app', host='0.0.0.0', port=8002, workers=1, log_level='critical'
+        )
+        print(f'Started application in environment: {environment}')
+
+    else:
+        dirs = glob.glob('../../..//**/*_module/**', recursive=True)
+        dirs.extend(glob.glob('../../..//**/plugins/**', recursive=True))
+        dirs.extend(glob.glob('../../..//**/packages/**', recursive=True))
+        dirs.append('../../floconsole')
+
+        uvicorn.run(
+            'server:app',
+            host='0.0.0.0',
+            port=8002,
+            workers=1,
+            reload=True,
+            reload_includes=dirs,
+            log_level='info',
+        )
+        print(f'Started application in environment: {environment}')
diff --git a/wavefront/server/apps/floconsole/floconsole/services/app_service.py b/wavefront/server/apps/floconsole/floconsole/services/app_service.py
new file mode 100644
index 00000000..96c98769
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/services/app_service.py
@@ -0,0 +1,88 @@
+from datetime import datetime, timezone
+from typing import List, Optional
+from uuid import UUID
+from async_lru import alru_cache
+
+from floconsole.db.models.app import App
+from floconsole.db.repositories.sql_alchemy_repository import SQLAlchemyRepository
+
+
+class AppService:
+    def __init__(self, app_repository: SQLAlchemyRepository[App]):
+        self.app_repository = app_repository
+
+    @alru_cache(maxsize=1, ttl=3600)
+    async def get_all_apps(self) -> List[App]:
+        """Get all non-deleted apps"""
+        return await self.app_repository.find(deleted=False)
+
+    @alru_cache(maxsize=128, ttl=3600)
+    async def get_app_by_id(self, app_id: UUID) -> Optional[App]:
+        """Get app by ID if not deleted"""
+        return await self.app_repository.find_one(id=app_id, deleted=False)
+
+    @alru_cache(maxsize=128, ttl=3600)
+    async def get_app_by_name(self, app_name: str) -> Optional[App]:
+        """Get app by name if not deleted"""
+        return await self.app_repository.find_one(app_name=app_name, deleted=False)
+
+    async def create_app(
+        self,
+        app_name: str,
+        app_url: Optional[str] = None,
+        status: str = 'in_progress',
+        app_secret: Optional[str] = None,
+        app_key: Optional[str] = None,
+        deployment_type: str = 'manual',
+        type: str = 'custom',
+        config: Optional[dict] = None,
+    ) -> App:
+        """Create a new app"""
+        result = await self.app_repository.create(
+            app_name=app_name,
+            app_url=app_url,
+            status=status,
+            app_secret=app_secret,
+            app_key=app_key,
+            deployment_type=deployment_type,
+            type=type,
+            config=config or {},
+        )
+        # Clear all_apps cache since we added a new app
+        self._clear_all_apps_cache()
+        return result
+
+    async def update_app(self, app_id: UUID, **update_data) -> Optional[App]:
+        """Update app by ID"""
+        if update_data:
+            update_data['updated_at'] = datetime.now(timezone.utc)
+        result = await self.app_repository.find_one_and_update(
+            filters={'id': app_id, 'deleted': False}, refresh=True, **update_data
+        )
+        if result:
+            # Clear caches since the app was updated
+            self._clear_all_caches()
+            return result
+        return None
+
+    def _clear_all_apps_cache(self):
+        """Clear the get_all_apps cache"""
+        self.get_all_apps.cache_clear()
+
+    def _clear_all_caches(self):
+        """Clear all caches"""
+        self._clear_all_apps_cache()
+        self.get_app_by_id.cache_clear()
+        self.get_app_by_name.cache_clear()  # keep name lookups consistent too
+
+    async def delete_app(self, app_id: UUID) -> Optional[App]:
+        """Soft delete app by ID"""
+        result = await self.app_repository.find_one_and_update(
+            filters={'id': app_id, 'deleted': False},
+            deleted=True,
+            updated_at=datetime.now(timezone.utc),
+        )
+        if result:
+            # Clear caches since the app was deleted
+            self._clear_all_caches()
+        return result
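The alru_cache decorators give each read path a one-hour TTL, while every write path invalidates explicitly, so a read right after a write sees fresh data. A sketch of the resulting behavior (illustrative, not part of the patch):

    async def demo(app_service):
        await app_service.get_all_apps()   # first call hits the DB, result cached
        await app_service.get_all_apps()   # served from cache (ttl=3600)
        await app_service.create_app(app_name='new', app_url='https://new.example')
        await app_service.get_all_apps()   # cache was cleared; re-queries, sees 'new'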
diff --git a/wavefront/server/apps/floconsole/floconsole/services/floware_proxy_service.py b/wavefront/server/apps/floconsole/floconsole/services/floware_proxy_service.py
new file mode 100644
index 00000000..5efa42a2
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/services/floware_proxy_service.py
@@ -0,0 +1,210 @@
+from dataclasses import dataclass
+
+# from floconsole.constants.app import AppDeploymentType
+from floconsole.constants.auth import SERVICE_AUTH_ROLE_ID, RootfloHeaders
+import httpx
+import jwt
+import os
+from datetime import datetime, timedelta
+from fastapi import Request
+from fastapi.responses import Response, StreamingResponse
+
+from floconsole.services.token_service import TokenService
+from floconsole.services.app_service import AppService
+
+
+@dataclass
+class UserSession:
+    role_id: str
+    user_id: str
+    session_id: str
+
+
+class FlowareProxyService:
+    def __init__(
+        self,
+        token_service: TokenService,
+        app_service: AppService,
+        service_issuer: str = 'https://console.rootflo.ai',
+        app_env: str = 'production',
+        token_prefix: str = 'fc_',
+        temporary_token_expiry: int = 300,
+    ):
+        self.token_service = token_service
+        self.app_service = app_service
+        self.service_issuer = service_issuer
+        self.is_dev = app_env == 'dev'
+        self.app_env = app_env
+        self.token_prefix = token_prefix
+        self.temporary_token_expiry = int(temporary_token_expiry)
+        self.passthrough_secret = os.getenv('PASSTHROUGH_SECRET')
+
+    async def _get_app_base_url(self, app_url: str, app_id: str) -> str:
+        """Get app base URL - used for both floware URL and JWT audience"""
+        if app_url.startswith('http'):
+            return app_url.rstrip('/')
+        elif self.is_dev and 'localhost' in app_id:
+            return f'http://{app_id}'
+        elif self.is_dev and 'host.docker.internal' in app_id:
+            return f'http://{app_id}'
+        else:
+            return app_url.rstrip('/')
+
+    async def _generate_service_token(
+        self, session: UserSession, app, app_base_url: str
+    ) -> str:
+        """Generate T2 service token using app secret"""
+        now = datetime.now()
+
+        # Create service token with console issuer and app-specific audience
+        payload = {
+            'iss': self.service_issuer,
+            'aud': app_base_url,
+            'iat': int(now.timestamp()),
+            'exp': int(
+                (now + timedelta(seconds=self.temporary_token_expiry)).timestamp()
+            ),  # Short-lived
+            'sub': session.user_id,
+            'user_id': session.user_id,
+            'role_id': SERVICE_AUTH_ROLE_ID,
+            'service_auth': True,  # Mark as service-to-service token
+        }
+
+        # Sign with app-specific secret
+        service_token = jwt.encode(payload, app.app_secret, algorithm='HS256')
+        return f'{self.token_prefix}{service_token}'
+
+    async def proxy_request(
+        self, method: str, app_id: str, path: str, request: Request
+    ) -> Response:
+        """
+        Proxy request to floware service with service authentication
+
+        Flow:
+        1. Get user session from middleware (already validated)
+        2. Fetch app details from database using app_id
+        3. Generate T2 service token with app-specific secret
+        4. Forward request to floware with app-specific key + Authorization headers
+        5. Return floware response directly
+        """
+        # Step 1: Get user session from middleware (already validated)
+        # session = request.state.session
+
+        # Step 2: Fetch app details from database
+        try:
+            app = await self.app_service.get_app_by_id(app_id)
+            if not app:
+                raise ValueError(f'App not found for ID: {app_id}')
+        except ValueError as e:
+            if 'App not found' in str(e):
+                raise e
+            raise ValueError(f'Invalid app_id format: {app_id}')
+
+        # if app.deployment_type == AppDeploymentType.MANUAL.value:
+        #     app_base_url = await self._get_app_base_url(app.app_url, app_id)
+        # else:
+        #     app_base_url = await self._get_app_base_url(
+        #         'https://' + app.app_name + '-floware.apps.rootflo.ai', app_id
+        #     )
+
+        app_base_url = await self._get_app_base_url(app.app_url, app_id)
+
+        # Step 4: Generate T2 service token with app-specific secret
+        # service_token = await self._generate_service_token(session, app, app_base_url)
+
+        # Step 5: Prepare request to floware
+        floware_url = f'{app_base_url}/floware/{path}'
+
+        # Copy headers from original request, excluding Authorization
+        headers = {
+            key: value
+            for key, value in request.headers.items()
+            if key.lower() not in ['authorization', 'host', 'content-length']
+        }
+
+        # Add service authentication headers using app-specific credentials
+        # headers[RootfloHeaders.CLIENT_KEY] = app.app_key
+        # headers['Authorization'] = f'Bearer {service_token}'
+        headers['Content-Type'] = request.headers.get(
+            'Content-Type', 'application/json'
+        )
+
+        # Add passthrough header for non-production environments
+        if self.app_env != 'production' and self.passthrough_secret:
+            headers[RootfloHeaders.PASSTHROUGH] = self.passthrough_secret
+
+        # Copy query parameters
+        query_params = dict(request.query_params)
+
+        # Detect if streaming (SSE) is needed
+        is_streaming = 'text/event-stream' in request.headers.get('accept', '').lower()
+
+        # Step 6: Make request to floware
+        if is_streaming:
+            # Streaming path: Keep client and stream contexts alive during iteration
+            client = httpx.AsyncClient(timeout=120.0)
+
+            # Start the stream context
+            stream_context = client.stream(
+                method=method,
+                url=floware_url,
+                headers=headers,
+                content=request.stream()
+                if method in ['POST', 'PUT', 'PATCH', 'DELETE']
+                else None,
+                params=query_params,
+            )
+
+            # Enter the stream context to get response metadata
+            response = await stream_context.__aenter__()
+
+            # Extract headers and status before streaming
+            response_headers = {
+                key: value
+                for key, value in response.headers.items()
+                if key.lower()
+                not in ['content-length', 'transfer-encoding', 'connection']
+            }
+            status_code = response.status_code
+
+            # Create generator that streams and cleans up contexts when done
+            async def stream_generator():
+                try:
+                    async for chunk in response.aiter_bytes():
+                        yield chunk
+                finally:
+                    # Clean up stream context and client when streaming completes
+                    await stream_context.__aexit__(None, None, None)
+                    await client.aclose()
+
+            return StreamingResponse(
+                stream_generator(),
+                status_code=status_code,
+                headers=response_headers,
+                media_type=response.headers.get('content-type', 'text/event-stream'),
+            )
+
+        # Non-streaming path: Use context manager for automatic cleanup
+        async with httpx.AsyncClient(timeout=600.0) as client:
+            # Non-streaming path: Buffer entire response (backward compatible)
+            response = await client.request(
+                method=method,
+                url=floware_url,
+                headers=headers,
+                content=request.stream()
+                if method in ['POST', 'PUT', 'PATCH', 'DELETE']
+                else None,
+                params=query_params,
+            )
+
+            # Return floware response directly - let floware handle JSON formatting
+            return Response(
+                content=response.content,
+                status_code=response.status_code,
+                headers={
+                    key: value
+                    for key, value in response.headers.items()
+                    if key.lower()
+                    not in ['content-length', 'transfer-encoding', 'connection']
+                },
+            )
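From a caller's perspective the streaming branch behaves like a plain SSE endpoint; a hedged sketch of consuming it through the proxy (illustrative, not part of the patch; the URL and app id are placeholders):

    import httpx


    async def consume_sse():
        url = 'http://localhost:8002/floconsole/<app-id>/v1/chat'  # placeholder route
        async with httpx.AsyncClient(timeout=None) as client:
            # 'accept: text/event-stream' is what selects the streaming branch
            async with client.stream(
                'POST', url, headers={'accept': 'text/event-stream'}, json={}
            ) as resp:
                async for chunk in resp.aiter_bytes():
                    print(chunk.decode(errors='replace'), end='')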
diff --git a/wavefront/server/apps/floconsole/floconsole/services/token_service.py b/wavefront/server/apps/floconsole/floconsole/services/token_service.py
new file mode 100644
index 00000000..b7f28c6a
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/services/token_service.py
@@ -0,0 +1,158 @@
+import base64
+import json
+import jwt
+import hashlib
+
+from datetime import datetime
+from datetime import timedelta
+from enum import Enum
+from typing import Any
+from flo_cloud._types import FloKMS
+
+
+class TokenAlgorithms(str, Enum):
+    RS256 = 'RS256'
+    PS256 = 'PS256'
+    ES256 = 'ES256'
+    ES384 = 'ES384'
+    ES512 = 'ES512'
+    RS384 = 'RS384'
+    RS512 = 'RS512'
+    PS384 = 'PS384'
+    PS512 = 'PS512'
+
+
+class TokenService:
+    def __init__(
+        self,
+        private_key: str,
+        public_key: str,
+        kms_service: FloKMS | None,
+        algorithm: TokenAlgorithms = TokenAlgorithms.PS256,
+        token_expiry: int = 4 * 60 * 60,  # 4 hours in seconds
+        temporary_token_expiry: int = 10 * 60,  # 10 minutes in seconds
+        app_env: str = 'production',
+        token_prefix: str = 'fc_',
+        issuer: str = 'https://console.rootflo.ai',
+        audience: str = 'https://console.rootflo.ai',
+    ):
+        self.is_dev = app_env == 'dev' or (kms_service is None)
+        self.private_key = self._load_key(private_key)
+        self.public_key = self._load_key(public_key)
+        self.algorithm = TokenAlgorithms.RS256.value if self.is_dev else algorithm.value
+        self.token_expiry = int(token_expiry)
+        self.temporary_token_expiry = int(temporary_token_expiry)
+        self.kms_service = kms_service
+        self.token_prefix = token_prefix or 'fc_'
+        self.issuer = issuer
+        self.audience = audience
+
+    def _load_key(self, key: str):
+        key = base64.b64decode(key).decode('ascii')
+        return key
+
+    def create_token(
+        self,
+        sub: str | None = None,
+        user_id: str | None = None,
+        role_id: str | None = None,
+        expiry: int | None = None,
+        payload: dict[str, Any] | None = None,
+        is_temporary: bool = False,
+    ) -> str:
+        if not is_temporary and (sub is None or user_id is None or role_id is None):
+            raise ValueError('Required values are missing for creating a token')
+
+        now = datetime.now()
+        data = {
+            key: value
+            for key, value in [
+                ('sub', sub),
+                ('user_id', user_id),
+                ('role_id', role_id),
+            ]
+            if value is not None
+        }
+
+        expiry_seconds = expiry or (
+            self.temporary_token_expiry if is_temporary else self.token_expiry
+        )
+        data['exp'] = int((now + timedelta(seconds=expiry_seconds)).timestamp())
+        data['iat'] = int(now.timestamp())
+        data['iss'] = self.issuer
+        data['aud'] = self.audience
+
+        if payload:
+            data.update(payload)
+
+        if self.is_dev:
+            token = jwt.encode({**data}, self.private_key, algorithm=self.algorithm)
+            return f'{self.token_prefix}{token}'
+        else:
+            header = {'alg': self.algorithm, 'typ': 'JWT'}
+
+            header_b64 = self._base64url_encode(json.dumps(header).encode())
+            payload_b64 = self._base64url_encode(json.dumps(data).encode())
+            message = f'{header_b64}.{payload_b64}'
+
+            digest = hashlib.sha256(message.encode()).digest()
+
+            if self.kms_service is None:
+                raise ValueError('KMS service is not initialized')
+
+            signature = self.kms_service.sign(message=digest)
+            signature = self._base64url_encode(signature)
+
+            token = f'{message}.{signature}'
+            return f'{self.token_prefix}{token}'
+
+    def decode_token(self, token: str) -> dict:
+        # Validate and remove prefix
+        if not token.startswith(self.token_prefix):
+            raise ValueError(
+                f'Invalid token format: missing prefix "{self.token_prefix}"'
+            )
+
+        # Remove the prefix
+        clean_token = token[len(self.token_prefix) :]
+
+        if self.is_dev:
+            decoded = jwt.decode(
+                clean_token,
+                self.public_key,
+                algorithms=[self.algorithm],
+                issuer=self.issuer,
+                audience=self.audience,
+            )
+            return decoded
+        else:
+            header_b64, payload_b64, signature_b64 = clean_token.split('.')
+
+            message = f'{header_b64}.{payload_b64}'
+            digest = hashlib.sha256(message.encode()).digest()
+            signature = self._base64url_decode(signature_b64)
+
+            if self.kms_service is None:
+                raise ValueError('KMS service is not initialized')
+
+            is_valid = self.kms_service.verify(message=digest, signature=signature)
+            if not is_valid:
+                return {}
+
+            public_key_pem = self.kms_service.get_public_key_pem()
+
+            decoded = jwt.decode(
+                clean_token,
+                public_key_pem,
+                algorithms=[self.algorithm],
+                issuer=self.issuer,
+                audience=self.audience,
+            )
+            return decoded
+
+    def _base64url_encode(self, data: bytes) -> str:
+        return base64.urlsafe_b64encode(data).rstrip(b'=').decode('utf-8')
+
+    def _base64url_decode(self, data: str) -> bytes:
+        padding = '=' * (-len(data) % 4)
+        return base64.urlsafe_b64decode(data + padding)
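In dev mode (app_env='dev', or whenever no KMS service is supplied) the service signs locally with RS256; a round-trip sketch with a throwaway key pair (illustrative, not part of the patch; key generation here is only for the demo):

    import base64

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import rsa

    from floconsole.services.token_service import TokenService

    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    priv = key.private_bytes(
        serialization.Encoding.PEM,
        serialization.PrivateFormat.PKCS8,
        serialization.NoEncryption(),
    )
    pub = key.public_key().public_bytes(
        serialization.Encoding.PEM,
        serialization.PublicFormat.SubjectPublicKeyInfo,
    )

    svc = TokenService(
        private_key=base64.b64encode(priv).decode(),  # keys are passed base64-encoded
        public_key=base64.b64encode(pub).decode(),
        kms_service=None,  # forces the dev (local-signing) path
        app_env='dev',
    )
    token = svc.create_token(sub='u1', user_id='u1', role_id='admin')
    claims = svc.decode_token(token)  # validates prefix, iss, aud, and exp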
diff --git a/wavefront/server/apps/floconsole/floconsole/utils/password_utils.py b/wavefront/server/apps/floconsole/floconsole/utils/password_utils.py
new file mode 100644
index 00000000..3bc6f837
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/utils/password_utils.py
@@ -0,0 +1,13 @@
+import bcrypt
+
+
+def hash_password(password: str) -> str:
+    salt = bcrypt.gensalt()
+    hashed = bcrypt.hashpw(password.encode('utf-8'), salt)
+    return hashed.decode('utf-8')
+
+
+def verify_password(plain_password: str, hashed_password: str) -> bool:
+    return bcrypt.checkpw(
+        plain_password.encode('utf-8'), hashed_password.encode('utf-8')
+    )
diff --git a/wavefront/server/apps/floconsole/floconsole/utils/user_utils.py b/wavefront/server/apps/floconsole/floconsole/utils/user_utils.py
new file mode 100644
index 00000000..e6b679ef
--- /dev/null
+++ b/wavefront/server/apps/floconsole/floconsole/utils/user_utils.py
@@ -0,0 +1,10 @@
+from fastapi import Request
+
+
+def get_current_user(req: Request):
+    # Guard the whole session object, not just the last tuple element,
+    # so an unauthenticated request doesn't raise AttributeError.
+    session = getattr(req.state, 'session', None)
+    if not session:
+        return None, None, None
+    return session.role_id, session.user_id, session.session_id
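A round-trip sketch for the password helpers (illustrative, not part of the patch):

    from floconsole.utils.password_utils import hash_password, verify_password

    hashed = hash_password('change-me')  # bcrypt hash with a fresh salt
    assert verify_password('change-me', hashed)
    assert not verify_password('wrong', hashed)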
diff --git a/wavefront/server/apps/floconsole/pyproject.toml b/wavefront/server/apps/floconsole/pyproject.toml
new file mode 100644
index 00000000..f562f764
--- /dev/null
+++ b/wavefront/server/apps/floconsole/pyproject.toml
@@ -0,0 +1,37 @@
+[project]
+name = "floconsole"
+version = "0.1.0"
+description = "Flo Console backend for rootflo apps"
+authors = [
+    { name = "rootflo engineering", email = "engineering@rootflo.ai" }
+]
+requires-python = ">=3.11"
+
+dependencies = [
+    "common-module",
+    "flo-cloud",
+    "fastapi>=0.115.2,<1.0.0",
+    "psycopg[binary,pool]>=3.2.3,<4.0.0",
+    "uvicorn>=0.30.5,<1.0.0",
+    "bcrypt>=4.2.1,<5.0.0",
+    "alembic>=1.15.2,<2.0.0",
+    "httpx>=0.28.1,<1.0.0",
+    "sqlalchemy>=2.0.40,<3.0.0",
+    "python-dotenv>=1.1.0,<2.0.0",
+    "dependency-injector>=4.46.0,<5.0.0",
+    "psycopg2>=2.9.10,<3.0.0",
+    "python-jose[cryptography]>=3.3.0,<4.0.0",
+    "async-lru>=2.0.5",
+]
+
+[tool.uv.sources]
+common-module = { workspace = true }
+flo-cloud = { workspace = true }
+
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["floconsole"]
diff --git a/wavefront/server/apps/floware/floware/__init__.py b/wavefront/server/apps/floware/floware/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wavefront/server/apps/floware/floware/channels.py b/wavefront/server/apps/floware/floware/channels.py
new file mode 100644
index 00000000..a8b5be4f
--- /dev/null
+++ b/wavefront/server/apps/floware/floware/channels.py
@@ -0,0 +1,49 @@
+import asyncio
+import threading
+from db_repo_module.cache.cache_manager import CacheManager
+from common_module.log.logger import logger
+from api_services_module.utils.api_change_processor import ApiChangeProcessor
+from api_services_module.utils.api_change_publisher import (
+    REDIS_API_SERVICE_UPDATES_CHANNEL,
+)
+
+
+async def start_redis_listener(
+    cache_manager: CacheManager,
+    api_change_processor: ApiChangeProcessor,
+):
+    """
+    Start Redis PubSub listener in a non-blocking way.
+    """
+    queue = asyncio.Queue()
+
+    pubsub = cache_manager.subscribe(channels=[REDIS_API_SERVICE_UPDATES_CHANNEL])
+    logger.info('Subscribed to Redis channel: %s', REDIS_API_SERVICE_UPDATES_CHANNEL)
+
+    # Capture the running loop from the main thread
+    loop = asyncio.get_running_loop()
+
+    # Run the blocking pubsub.listen() inside a thread
+    def run_pubsub():
+        try:
+            for message in pubsub.listen():
+                if message['type'] == 'message':
+                    asyncio.run_coroutine_threadsafe(queue.put(message['data']), loop)
+        except Exception as e:
+            logger.error(f'Error in pubsub thread: {e}')
+
+    thread = threading.Thread(target=run_pubsub, daemon=True)
+    thread.start()
+
+    logger.info('Redis listener thread started')
+
+    # Async loop: process messages from the queue
+    while True:
+        data = await queue.get()
+        try:
+            logger.info(f'Received update: {data}')
+            await api_change_processor.process_message(data)
+        except Exception as e:
+            logger.error(f'Error processing message: {e}')
+        finally:
+            queue.task_done()
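start_redis_listener never returns, so the natural way to run it is as a background task during application startup; a sketch (illustrative, not part of the patch):

    import asyncio

    from floware.channels import start_redis_listener


    async def on_startup(cache_manager, api_change_processor):
        # Fire-and-forget: the coroutine loops forever draining the queue.
        task = asyncio.create_task(
            start_redis_listener(cache_manager, api_change_processor)
        )
        return task  # keep a reference so the task isn't garbage-collected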
diff --git a/wavefront/server/apps/floware/floware/config.ini b/wavefront/server/apps/floware/floware/config.ini
new file mode 100644
index 00000000..717796ed
--- /dev/null
+++ b/wavefront/server/apps/floware/floware/config.ini
@@ -0,0 +1,173 @@
+[app_config]
+client_id = ${CLIENT_ID}
+client_secret = ${CLIENT_SECRET}
+product_id = ${PRODUCT_ID}
+
+[env_config]
+app_env = ${APP_ENV}
+app_name = ${APP_NAME:floware}
+
+[database]
+username = ${DB_USERNAME}
+password = ${DB_PASSWORD}
+host = ${DB_HOST}
+port = ${DB_PORT}
+db_name = ${DB_NAME}
+
+[redshift]
+username = ${REDSHIFT_USERNAME}
+password = ${REDSHIFT_PASSWORD}
+host = ${REDSHIFT_HOST}
+port = ${REDSHIFT_PORT}
+db_name = ${REDSHIFT_DB}
+
+[insights]
+today_as_max_from_db=${INSIGHTS_TODAY_AS_MAX_DATE}
+
+[redis]
+protocol = ${REDIS_PROTOCOL}
+host = ${REDIS_HOST}
+port = ${REDIS_PORT}
+
+[azure]
+azure_embeddings_endpoint = ${AZURE_OPEN_AI_EMBEDDING_ENDPOINT}
+azure_embeddings_deployment = ${AZURE_EMBEDDINGS_DEPLOYMENT}
+azure_embeddings_api_key = ${AZURE_OPEN_EMBEDDING_AI_KEY}
+client_id = ${AZURE_CLIENT_ID}
+tenant_id = ${AZURE_TENANT_ID}
+client_secret = ${AZURE_CLIENT_SECRET}
+scopes = ${AZURE_SCOPES}
+redirect_uri = ${AZURE_REDIRECT_URI}
+
+[google]
+secret_file_path = ${GOOGLE_SECRET_FILE_PATH}
+scopes = ${GOOGLE_SCOPES}
+redirect_uri = ${GOOGLE_REDIRECT_URI}
+
+[extractor]
+tesseract_path = ${TESSERACT_PATH}
+
+[azure_openai]
+endpoint = ${AZURE_OPENAI_ENDPOINT}
+api_key = ${AZURE_OPENAI_API_KEY}
+api_version = ${AZURE_OPENAI_API_VERSION}
+
+[slack]
+client_id = ${SLACK_CLIENT_ID}
+client_secret = ${SLACK_CLIENT_SECRET}
+scopes = ${SLACK_SCOPES}
+redirect_uri = ${SLACK_REDIRECT_URI}
+
+[jwt_token]
+token_expiry=${TOKEN_EXPIRY}
+temporary_token_expiry=${TEMPORARY_TOKEN_EXPIRY}
+private_key=${PRIVATE_KEY}
+public_key=${PUBLIC_KEY}
+enable_cloud_kms=${ENABLE_CLOUD_KMS}
+console_token_prefix=${CONSOLE_TOKEN_PREFIX:fc_}
+issuer=${FLOWARE_JWT_ISSUER:https://floware.rootflo.ai}
+audience=${FLOWARE_JWT_AUDIENCE:https://floware.rootflo.ai}
+validation_issuer=${FLOWARE_JWT_VALIDATION_ISSUER:https://console.rootflo.ai,https://floware.rootflo.ai}  # allow console as well
+
+[superset]
+url=${SUPERSET_URL}
+username=${SUPERSET_USERNAME}
+password=${SUPERSET_PASSWORD}
+
+[web]
+url=${WEB_URL}
+
+[aws]
+aws_access_key=${AWS_ACCESS_KEY_ID}
+aws_secret_key=${AWS_SECRET_ACCESS_KEY}
+transcript_bucket_name=${TRANSCRIPT_BUCKET_NAME}
+aws_asset_storage_bucket=${AWS_GOLD_ASSET_BUCKET_NAME}
+audio_bucket_name=${AUDIO_BUCKET_NAME}
+model_storage_bucket=${MODEL_STORAGE_BUCKET}
+queue_url=${AWS_QUEUE_URL}
+region=${AWS_REGION}
+
+[gcp]
+transcript_bucket_name=${TRANSCRIPT_BUCKET_NAME}
+audio_bucket_name=${AUDIO_BUCKET_NAME}
+model_storage_bucket=${MODEL_STORAGE_BUCKET}
+gcp_service_account_json = ${GCP_SERVICE_ACCOUNT_JSON}
+gcp_asset_storage_bucket = ${GCP_ASSET_STORAGE_BUCKET}
+gcp_project_id = ${GCP_PROJECT_ID}
+gold_topic_id=${GCP_GOLD_TOPIC_ID}
+email_topic_id=${GCP_EMAIL_TOPIC_ID}
+gcp_storage_bucket_name = ${GCP_STORAGE_BUCKET_NAME}
+gcp_location = ${GCP_LOCATION}
+gcp_key_ring = ${GCP_KMS_KEY_RING}
+gcp_crypto_key = ${GCP_KMS_CRYPTO_KEY}
+gcp_crypto_key_version = ${GCP_KMS_CRYPTO_KEY_VERSION}
+config_file_name=${CONFIG_FILE_NAME}
+
+[bigquery]
+project_id=${BQ_PROJECT_ID}
+dataset_id=${BQ_DATASET_ID}
+
+[cloud_config]
+cloud_provider=${CLOUD_PROVIDER}
+
+[email]
+email_provider=${EMAIL_PROVIDER}
+
+[outlook]
+client_id=${OUTLOOK_CLIENT_ID}
+client_secret=${OUTLOOK_CLIENT_SECRET}
+tenant_id=${OUTLOOK_TENANT_ID}
+email_id=${OUTLOOK_SENDER_EMAILID}
+authority=${AUTHORITY}
+webhook_url=${WEBHOOK_URL}
+
+[gmail]
+service_account_file=${GMAIL_SERVICE_ACCOUNT_FILE}
+email_sender=${GMAIL_SENDER_EMAILID}
+delegate_user=${GMAIL_DELEGATE_USER}
+
+[scheduler]
+daily_alert_cron=${DAILY_ALERT_CRON:0 5 * * *}
+weekly_alert_cron=${WEEKLY_ALERT_CRON:0 6 * * 1}
+usage_metric_cron=${USAGE_METRIC_CRON:0 * * * *}
+daily_leads_cron=${DAILY_LEADS_CRON:30 5 * * *}
+
+[usage_metric]
+url=${USAGE_METRIC_URL}
+
+[openai]
+openai_api_key=${OPENAI_API_KEY}
+openai_model_name=${OPENAI_MODEL_NAME}
+
+[leads]
+enabled=${LEADS_ENABLED}
+
+[agents]
+agent_yaml_bucket=${AGENT_YAML_BUCKET}
+
+[auth]
+max_failed_attempts=${MAX_FAILED_ATTEMPTS}
+lockout_duration_hours=${LOCKOUT_DURATION_HOURS}
+inactive_days_threshold=${INACTIVE_DAYS_THRESHOLD:60}
+
+[model]
+inference_service_url=${INFERENCE_SERVICE_URL}
+
+[embedding_url]
+embedding_service_url=${EMBEDDING_SERVICE_URL}
+
+[image_search]
+reference_images_bucket = ${IMAGE_SEARCH_REFERENCE_IMAGES_BUCKET}
+
+[voice_agents]
+call_processing_base_url=${CALL_PROCESSING_BASE_URL}
+voice_agent_bucket=${VOICE_AGENT_BUCKET}
+
+[api_service]
+application_bucket=${APPLICATION_BUCKET}
+
+[hermes]
+url=${HERMES_URL:http://localhost:8080/flo-hermes}
+
+[workflow]
+worker_topic=${WORKFLOW_WORKER_TOPIC}
diff --git a/wavefront/server/apps/floware/floware/controllers/__init__.py b/wavefront/server/apps/floware/floware/controllers/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wavefront/server/apps/floware/floware/controllers/config_controller.py b/wavefront/server/apps/floware/floware/controllers/config_controller.py
new file mode 100644
index 00000000..027e30a5
--- /dev/null
+++ b/wavefront/server/apps/floware/floware/controllers/config_controller.py
@@ -0,0 +1,99 @@
+from typing import Annotated
+
+from fastapi import APIRouter, Request
+from fastapi import UploadFile, File
+from floware.di.application_container import ApplicationContainer
+from common_module.response_formatter import ResponseFormatter
+from common_module.common_container import CommonContainer
+from fastapi.params import Depends
+from dependency_injector.wiring import inject
+from dependency_injector.wiring import Provide
+from floware.services.config_service import ConfigService
+from user_management_module.utils.user_utils import get_current_user, check_is_admin
+from fastapi import HTTPException
+from fastapi import Form
+import json
+from fastapi.responses import JSONResponse
+from fastapi import status
+
+config_router = APIRouter(prefix='/v1')
+
+
+# This endpoint can also receive an app_config dict of key/value pairs.
+@config_router.put('/settings/config/app-icon')
+@inject
+async def set_config(
+    request: Request,
+    response_formatter: Annotated[
+        ResponseFormatter,
+        Depends(Provide[CommonContainer.response_formatter]),
+    ],
+    config_service: Annotated[
+        ConfigService,
+        Depends(Provide[ApplicationContainer.config_service]),
+    ],
+    app_config: str = Form(None),
+    file: UploadFile = File(None),
+):
+    """
+    Upload the application logo to cloud storage.
+    The file must be smaller than 1MB and of type png, jpeg, or jpg.
+    It is saved in the config bucket under the name config_file_name,
+    a constant defined in the config.ini file.
+    """
+    # Only admins are allowed to change the app configuration
+    role_id, _, _ = get_current_user(request)
+    is_admin = await check_is_admin(role_id)
+    if not is_admin:
+        raise HTTPException(status_code=401, detail='Unauthorized')
+
+    # Parse the app_config JSON string into a dict
+    app_config_dict = {}
+    if app_config:
+        try:
+            app_config_dict = json.loads(app_config)
+        except json.JSONDecodeError:
+            raise HTTPException(
+                status_code=400, detail='Invalid app_config JSON format'
+            )
+
+    await config_service.store_app_config(file, app_config_dict)
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {'message': 'Config set successfully'}
+        ),
+    )
+
+
+@config_router.get('/settings/config')
+@inject
+async def get_config(
+    config_service: Annotated[
+        ConfigService,
+        Depends(Provide[ApplicationContainer.config_service]),
+    ],
+    response_formatter: Annotated[
+        ResponseFormatter,
+        Depends(Provide[CommonContainer.response_formatter]),
+    ],
+):
+    """
+    Return the full application configuration,
+    such as the logo, the table to query, etc.
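+
+    Illustrative shape of the success payload (as passed to the response
+    formatter; the values shown are hypothetical):
+        {'app_icon': '<presigned url>', 'app_config': {'width': '100px'}}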
+    """
+    url, app_config = await config_service.get_app_config()
+    if not url:
+        return JSONResponse(
+            status_code=status.HTTP_200_OK,
+            content=response_formatter.buildSuccessResponse(
+                {'message': 'No config found'}
+            ),
+        )
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {'app_icon': url, 'app_config': app_config}
+        ),
+    )
diff --git a/wavefront/server/apps/floware/floware/controllers/notification_controller.py b/wavefront/server/apps/floware/floware/controllers/notification_controller.py
new file mode 100644
index 00000000..4d542b10
--- /dev/null
+++ b/wavefront/server/apps/floware/floware/controllers/notification_controller.py
@@ -0,0 +1,74 @@
+from common_module.common_container import CommonContainer
+from common_module.response_formatter import ResponseFormatter
+from db_repo_module.models.notification_users import NotificationUser
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from dependency_injector.wiring import inject
+from dependency_injector.wiring import Provide
+from fastapi import Depends
+from fastapi import Request
+from fastapi import status
+from fastapi.responses import JSONResponse
+from fastapi.routing import APIRouter
+
+from floware.di.application_container import ApplicationContainer
+from floware.services.notification_service import NotificationService
+
+notification_router = APIRouter()
+
+
+@notification_router.get('/notification')
+@inject
+async def get_notifications(
+    request: Request,
+    notification_service: NotificationService = Depends(
+        Provide[ApplicationContainer.notification_service]
+    ),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+):
+    current_id = request.state.session.user_id
+    notification_res = await notification_service.fetch_notification(user_id=current_id)
+
+    response = [
+        {
+            'id': str(notify['notification_id']),
+            'title': notify['title'],
+            'type': notify['type'],
+            'created_at': str(notify['created_at']),
+            'updated_at': str(notify['updated_at']),
+            'user_id': str(current_id),
+            'seen': bool(notify['seen']),
+        }
+        for notify in notification_res
+    ]
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse({'notifications': response}),
+    )
+
+
+@notification_router.patch('/notification')
+@inject
+async def update_notification(
+    id: str,
+    request: Request,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    notification_user_repository: SQLAlchemyRepository[NotificationUser] = Depends(
+        Provide[ApplicationContainer.notification_user_repository]
+    ),
+):
+    current_id = request.state.session.user_id
+    await notification_user_repository.upsert(
+        {'notification_id': id, 'user_id': current_id}, seen=True
+    )
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {'message': 'Updated successfully'}
+        ),
+    )
diff --git a/wavefront/server/apps/floware/floware/decorators/with_lock.py b/wavefront/server/apps/floware/floware/decorators/with_lock.py
new file mode 100644
index 00000000..404ead2d
--- /dev/null
+++ b/wavefront/server/apps/floware/floware/decorators/with_lock.py
@@ -0,0 +1,41 @@
+from functools import wraps
+
+from common_module.log.logger import logger
+from db_repo_module.cache.cache_manager import CacheManager
+from pottery import Redlock
+
+
+def with_lock(
+
lock_key: str, cache_manager: CacheManager, auto_release_time=600, cache_expiry=3600
+):
+    """
+    Decorator to handle distributed locking using Redlock.
+    """
+
+    def decorator(func):
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            lock = Redlock(
+                key=lock_key,
+                masters={cache_manager.redis},
+                auto_release_time=auto_release_time,
+            )
+            try:
+                with lock:
+                    if cache_manager.get_str(lock_key):
+                        logger.info(
+                            f'Job "{lock_key}" already executed. Skipping re-run.'
+                        )
+                        return
+                    # Mark the job as executed, then run the wrapped function
+                    if cache_manager.add(lock_key, 1, expiry=cache_expiry):
+                        logger.info(f'Executing the job "{lock_key}"')
+                        result = func(*args, **kwargs)
+                        return result
+            except Exception as e:
+                logger.error(f'Error while executing job "{lock_key}": {e}')
+                raise
+
+        return wrapper
+
+    return decorator
diff --git a/wavefront/server/apps/floware/floware/di/application_container.py b/wavefront/server/apps/floware/floware/di/application_container.py
new file mode 100644
index 00000000..e0b94952
--- /dev/null
+++ b/wavefront/server/apps/floware/floware/di/application_container.py
@@ -0,0 +1,41 @@
+from dependency_injector import containers
+from dependency_injector import providers
+
+from floware.services.notification_service import NotificationService
+from flo_cloud.cloud_storage import CloudStorageManager
+from floware.services.config_service import ConfigService
+
+
+class ApplicationContainer(containers.DeclarativeContainer):
+    config = providers.Configuration(ini_files=['./config.ini'])
+    # db
+    db_client = providers.Dependency()
+
+    email_repository = providers.Dependency()
+    oauth_credential_repository = providers.Dependency()
+    user_repository = providers.Dependency()
+    task_repository = providers.Dependency()
+
+    insights_service = providers.Dependency()
+    pvo_repository = providers.Dependency()
+
+    notification_repository = providers.Dependency()
+    notification_user_repository = providers.Dependency()
+    config_repository = providers.Dependency()
+
+    # services
+    notification_service = providers.Singleton(
+        NotificationService, notification_repository, notification_user_repository
+    )
+
+    cloud_manager = providers.Singleton(
+        CloudStorageManager,
+        provider=config.cloud_config.cloud_provider,
+    )
+
+    config_service = providers.Singleton(
+        ConfigService,
+        config_repository=config_repository,
+        cloud_manager=cloud_manager,
+        config=config,
+    )
diff --git a/wavefront/server/apps/floware/floware/middleware/__init__.py b/wavefront/server/apps/floware/floware/middleware/__init__.py
new file mode 100644
index 00000000..ed5a7cb9
--- /dev/null
+++ b/wavefront/server/apps/floware/floware/middleware/__init__.py
@@ -0,0 +1,7 @@
+"""
+Middleware package for Floware application.
+"""
+
+from .security_headers import SecurityHeadersMiddleware
+
+__all__ = ['SecurityHeadersMiddleware']
diff --git a/wavefront/server/apps/floware/floware/middleware/security_headers.py b/wavefront/server/apps/floware/floware/middleware/security_headers.py
new file mode 100644
index 00000000..ac9c06bb
--- /dev/null
+++ b/wavefront/server/apps/floware/floware/middleware/security_headers.py
@@ -0,0 +1,177 @@
+"""
+Security Headers Middleware for FastAPI
+
+This middleware adds essential security headers to all HTTP responses to protect against
+various web vulnerabilities and implement security best practices.
+
+Headers Implemented:
+1. X-Content-Type-Options: nosniff
+   - Prevents browsers from interpreting files as something other than their declared MIME type
+   - Protects against MIME-type confusion attacks
+
+2.
X-XSS-Protection: 1; mode=block + - Enables the built-in XSS filter in modern web browsers + - Instructs browser to block rather than sanitize when XSS is detected + +3. X-Frame-Options: SAMEORIGIN + - Controls how your site can be embedded in iframes + - Set to SAMEORIGIN to allow embedding only on the same origin + - Prevents clickjacking attacks + +4. Referrer-Policy: strict-origin-when-cross-origin + - Controls how much information is included in the HTTP Referer header + - Balances functionality with privacy by sending full URL for same-origin requests + - Sends only origin for cross-origin requests + +5. Content-Security-Policy: (environment-dependent) + - Defines trusted sources for various content types + - Helps prevent XSS attacks by restricting resource loading + - Different policies for development vs production environments + +6. Strict-Transport-Security: (production only) + - Forces HTTPS connections when in production + - Protects against protocol downgrade attacks + +7. Cache-Control: no-store, no-cache, must-revalidate + - Prevents caching of sensitive information + - Ensures fresh content is always fetched from server + - Protects against cache-based information leakage + +8. Pragma: no-cache + - Legacy cache control for HTTP/1.0 compatibility + - Ensures older proxies and browsers don't cache responses + +Usage: + Add this middleware to your FastAPI app before CORS middleware: + + app.add_middleware(SecurityHeadersMiddleware) + +Testing: + Use the included test script to verify headers are properly set: + + python test_security_headers.py --url http://localhost:8001 +""" + +from fastapi import Request, Response +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.types import ASGIApp +import os + + +class SecurityHeadersMiddleware(BaseHTTPMiddleware): + """ + Middleware to add security headers to all responses. 
+
+    Headers added:
+    - X-Content-Type-Options: nosniff - Prevents MIME type sniffing
+    - X-XSS-Protection: 1; mode=block - Enables XSS protection in browsers
+    - X-Frame-Options: SAMEORIGIN - Controls iframe embedding
+    - Referrer-Policy: strict-origin-when-cross-origin - Controls referrer information
+    - Content-Security-Policy: Basic CSP for additional protection
+    - Cache-Control: no-store, no-cache, must-revalidate - Prevents caching
+    - Pragma: no-cache - Legacy cache control for HTTP/1.0 compatibility
+    - Expires: 0 - Prevents caching
+    """
+
+    def __init__(self, app: ASGIApp):
+        super().__init__(app)
+
+        # Get environment-specific configuration
+        self.environment = os.getenv('APP_ENV', 'dev')
+
+        # Configure static security headers based on environment
+        self.static_security_headers = {
+            # Prevent browsers from interpreting files as something other than declared MIME type
+            'X-Content-Type-Options': 'nosniff',
+            # Enable XSS filter in modern browsers
+            'X-XSS-Protection': '1; mode=block',
+            # Control iframe embedding - allow same origin
+            'X-Frame-Options': 'SAMEORIGIN',
+            # Control referrer information
+            'Referrer-Policy': 'strict-origin-when-cross-origin',
+            # Basic Content Security Policy
+            'Content-Security-Policy': self._get_csp_header(),
+            # Legacy cache control for HTTP/1.0 compatibility
+            'Pragma': 'no-cache',
+            # Prevent caching
+            'Expires': '0',
+            # Strict Transport Security (HTTPS only)
+            'Strict-Transport-Security': 'max-age=31536000; includeSubDomains'
+            if self.environment == 'production'
+            else None,
+        }
+
+        # Remove None values
+        self.static_security_headers = {
+            k: v for k, v in self.static_security_headers.items() if v is not None
+        }
+
+    def _get_csp_header(self) -> str:
+        """
+        Generate Content Security Policy header based on environment.
+
+        Returns:
+            str: CSP header value
+        """
+        if self.environment == 'production':
+            # Stricter CSP for production (note: still allows 'unsafe-inline'
+            # and 'unsafe-eval' scripts for compatibility)
+            return (
+                "default-src 'self'; "
+                "script-src 'self' 'unsafe-inline' 'unsafe-eval'; "
+                "style-src 'self' 'unsafe-inline'; "
+                "img-src 'self' data: https:; "
+                "font-src 'self' data:; "
+                "connect-src 'self'; "
+                "frame-ancestors 'self'; "
+                "base-uri 'self'; "
+                "form-action 'self'"
+            )
+        else:
+            # More permissive CSP for development
+            return (
+                "default-src 'self'; "
+                "script-src 'self' 'unsafe-inline' 'unsafe-eval' http://localhost:* ws://localhost:*; "
+                "style-src 'self' 'unsafe-inline'; "
+                "img-src 'self' data: https: http:; "
+                "font-src 'self' data:; "
+                "connect-src 'self' http://localhost:* ws://localhost:*; "
+                "frame-ancestors 'self'; "
+                "base-uri 'self'; "
+                "form-action 'self'"
+            )
+
+    def _get_cache_control_header(self, request_path: str) -> str:
+        """
+        Determine the Cache-Control header for a request.
+
+        Currently a conservative no-store policy is returned for every path;
+        the request_path argument is kept so per-path caching rules can be
+        added later.
+
+        Args:
+            request_path: The path of the current request
+
+        Returns:
+            str: Cache-Control header value
+        """
+        return 'no-store, no-cache, must-revalidate, max-age=0'
+
+    async def dispatch(self, request: Request, call_next) -> Response:
+        """
+        Process the request and add security headers to the response.
+ + Args: + request: The incoming HTTP request + call_next: The next middleware or route handler + + Returns: + Response: The HTTP response with security headers added + """ + # Process the request + response = await call_next(request) + + # Add static security headers to the response + for header_name, header_value in self.static_security_headers.items(): + response.headers[header_name] = header_value + + # Add dynamic cache control header based on request path + cache_control = self._get_cache_control_header(request.url.path) + response.headers['Cache-Control'] = cache_control + + return response diff --git a/wavefront/server/apps/floware/floware/server.py b/wavefront/server/apps/floware/floware/server.py new file mode 100644 index 00000000..dee93e56 --- /dev/null +++ b/wavefront/server/apps/floware/floware/server.py @@ -0,0 +1,579 @@ +from contextlib import asynccontextmanager +import glob +import os +import asyncio +from typing import Any, Callable, cast + +from dotenv import load_dotenv +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from fastapi.openapi.utils import get_openapi + +import uvicorn + +# ruff: noqa: E402 +load_dotenv() # Loading env values before importing modules to fix late read problem + +from auth_module.auth_container import AuthContainer +from auth_module.controllers.outlook_controller import subscription_controller +from auth_module.controllers.superset_controller import superset_controller +from auth_module.controllers.hmac_controller import hmac_router +from common_module.common_container import CommonContainer +from common_module.middleware.request_id_middleware import ( + RequestIdMiddleware, + get_current_request_id, +) +from common_module.log.logger import logger +from common_module.prometheus.prometheus_middleware import PrometheusMiddleware +from common_module.response_formatter import ResponseFormatter +from db_repo_module.database.connection import DatabaseClient +from db_repo_module.db_repo_container import DatabaseModuleContainer +from fastapi import HTTPException +from fastapi import Request +from fastapi.responses import JSONResponse +from gold_module.controllers.router import gold_router +from gold_module.gold_container import GoldContainer +from insights_module.controllers.router import insights_router +from insights_module.insights_container import InsightsContainer + +from knowledge_base_module.controllers.knowledge_base_controller import ( + knowledge_base_router, +) +from knowledge_base_module.controllers.knowledge_base_document_controller import ( + kb_document_router, +) +from knowledge_base_module.controllers.rag_retreival_controller import ( + rag_retrieval_router, +) +from knowledge_base_module.knowledge_base_container import KnowledgeBaseContainer +from user_management_module.authorization.require_auth import RequireAuthMiddleware +from user_management_module.router import user_management_router +from user_management_module.user_container import UserContainer + +from floware.controllers.notification_controller import notification_router +from floware.di.application_container import ApplicationContainer +from floware.middleware.security_headers import SecurityHeadersMiddleware +from plugins_module.plugins_container import PluginsContainer +from plugins_module.controllers.datasource_controller import datasource_router +from plugins_module.controllers.authenticator_controller import authenticator_router +from floware.controllers.config_controller import config_router +from 
product_analysis_module.controllers.product_anaysis_controllers import ( + product_analysis_router, +) +from product_analysis_module.product_analysis_container import ProductAnalysisContainer + +from agents_module.controllers.agent_controller import agents_router +from agents_module.controllers.namespace_controller import namespace_router +from agents_module.controllers.workflow_controller import workflows_router +from agents_module.controllers.workflow_runs import workflow_runs_router +from agents_module.controllers.workflow_pipeline_controller import ( + workflow_pipeline_router, +) +from agents_module.agents_container import AgentsContainer +from inference_module.inference_container import InferenceContainer +from inference_module.controllers.inference_controller import inference_router + +from llm_inference_config_module.container import LlmInferenceConfigContainer +from llm_inference_config_module.controllers.llm_inference_config_controller import ( + llm_inference_config_router, +) +from llm_inference_config_module.controllers.inference_proxy_controller import ( + inference_proxy_router, +) +from image_search_module.controllers.image_search_controller import image_search_router +from image_search_module.image_search_container import ImageSearchContainer +from tools_module.controllers.tools_controller import tools_router +from tools_module.tools_container import ToolsContainer +from voice_agents_module.voice_agents_container import VoiceAgentsContainer +from voice_agents_module.controllers.telephony_config_controller import ( + telephony_config_router, +) +from voice_agents_module.controllers.tts_config_controller import tts_config_router +from voice_agents_module.controllers.stt_config_controller import stt_config_router +from voice_agents_module.controllers.voice_agent_controller import voice_agent_router +from plugins_module.controllers.message_processor_controller import ( + message_processor_router, +) + +# API Services Module +from api_services_module.api_services_container import create_api_services_container +from api_services_module.api_services_container import ApiServicesContainer +from floware.channels import start_redis_listener +from starlette.middleware import _MiddlewareFactory + +# Initialize dependency containers +# Create a single shared instance of the database container +db_repo_container = DatabaseModuleContainer() +auth_container = AuthContainer( + db_client=db_repo_container.db_client, cache_manager=db_repo_container.cache_manager +) +common_container = CommonContainer(cache_manager=db_repo_container.cache_manager) +user_module_container = UserContainer( + db_client=db_repo_container.db_client, cache_manager=db_repo_container.cache_manager +) +insights_container = InsightsContainer( + notification_repository=db_repo_container.notification_repository, + cache_manager=db_repo_container.cache_manager, +) + + +application_container = ApplicationContainer( + db_client=db_repo_container.db_client, + email_repository=db_repo_container.email_repository, + oauth_credential_repository=db_repo_container.oauth_credential_repository, + user_repository=db_repo_container.user_repository, + task_repository=db_repo_container.task_repository, + insights_service=insights_container.insights_service, + pvo_repository=insights_container.pvo_repository, + notification_repository=db_repo_container.notification_repository, + notification_user_repository=db_repo_container.notification_user_repository, + config_repository=db_repo_container.config_repository, +) + +email_rag_container = 
KnowledgeBaseContainer(
+    db_client=db_repo_container.db_client, cache_manager=db_repo_container.cache_manager
+)
+
+gold_container = GoldContainer()
+
+plugins_container = PluginsContainer(
+    db_client=db_repo_container.db_client,
+    cloud_manager=common_container.cloud_storage_manager,
+    dynamic_query_repository=db_repo_container.dynamic_query_repository,
+    cache_manager=db_repo_container.cache_manager,
+)
+
+product_analysis_container = ProductAnalysisContainer()
+
+tools_container = ToolsContainer(
+    datasource_repository=db_repo_container.datasource_repository,
+    knowledge_base_repository=db_repo_container.knowledge_base_repository,
+    knowledge_base_inference_repository=db_repo_container.knowledge_base_inference_repository,
+)
+
+agents_container = AgentsContainer(
+    db_client=db_repo_container.db_client,
+    cloud_storage_manager=common_container.cloud_storage_manager,
+    cache_manager=db_repo_container.cache_manager,
+    tool_loader=tools_container.tool_loader,
+    workflow_pipeline_repository=db_repo_container.workflow_pipeline_repository,
+    workflow_runs_repository=db_repo_container.workflow_runs_repository,
+    namespace_repository=db_repo_container.namespace_repository,
+    agent_repository=db_repo_container.agent_repository,
+    workflow_repository=db_repo_container.workflow_repository,
+)
+
+inference_container = InferenceContainer(
+    db_client=db_repo_container.db_client,
+    cache_manager=db_repo_container.cache_manager,
+)
+
+llm_inference_config_container = LlmInferenceConfigContainer(
+    db_client=db_repo_container.db_client,
+    cache_manager=db_repo_container.cache_manager,
+)
+
+image_search_container = ImageSearchContainer(
+    db_client=db_repo_container.db_client,
+    cloud_storage_manager=common_container.cloud_storage_manager,
+)
+
+# API Services Container
+api_services_container: ApiServicesContainer = create_api_services_container(
+    api_service_repository=db_repo_container.api_services_repository,
+    cloud_storage_manager=common_container.cloud_storage_manager,
+    db_client=db_repo_container.db_client,
+    cache_manager=db_repo_container.cache_manager,
+    response_formatter=common_container.response_formatter,
+)
+
+voice_agents_container = VoiceAgentsContainer(
+    db_client=db_repo_container.db_client,
+    cache_manager=db_repo_container.cache_manager,
+    cloud_storage_manager=common_container.cloud_storage_manager,
+)
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    # Startup code (runs before the application starts)
+    logger.info('Starting application...')
+
+    try:
+        db_client: DatabaseClient = db_repo_container.db_client()
+
+        if isinstance(db_client, DatabaseClient):
+            logger.info('========== Establishing db connection ...')
+            await db_client.connect()
+            logger.info('========== DB connection established.')
+        else:
+            raise TypeError('db_client is not an instance of DatabaseClient')
+
+        db_client.run_migration()
+
+        # Instantiate scheduler from container when needed
+        scheduler = common_container.scheduler()
+        scheduler.start_scheduler()
+        logger.info('Scheduler started.')
+
+        # Load API services from database into registry
+        service_registry = api_services_container.initialized_service_registry()
+        if getattr(service_registry, 'api_service_manager', None):
+            try:
+                await service_registry.load_from_db()
+                logger.info('API services loaded from database')
+
+                # Reload routes to include newly loaded services
+                proxy_router = api_services_container.proxy_router()
+                proxy_router.reload_routes()
+                logger.info('API service routes reloaded')
+            except Exception as e:
+
logger.warning(f'Failed to load API services from database: {e}')
+
+        api_services_container.initialized_proxy()
+
+        # Include API services router AFTER services are loaded so routes are registered
+        # This ensures FastAPI's route table includes the dynamic routes
+        app.include_router(
+            api_services_container.router(), tags=['API Services'], prefix='/floware'
+        )
+        logger.info('API services router included in app')
+
+        # Start background Redis listener for updates; keep a reference on
+        # app.state so the task is not garbage collected mid-run
+        app.state.redis_listener_task = asyncio.create_task(
+            start_redis_listener(
+                cache_manager=db_repo_container.cache_manager(),
+                api_change_processor=api_services_container.api_change_processor(),
+            )
+        )
+
+        # Set app reference in proxy router so new routes can be added dynamically
+        proxy_router = api_services_container.proxy_router()
+        proxy_router.set_app(app, prefix='/floware')
+        logger.info('App reference set in proxy router for dynamic route registration')
+
+        yield  # This is where the application runs
+
+        # Shutdown code
+        logger.info('Shutting down application...')
+
+    except Exception as e:
+        logger.error(f'Error during application lifecycle: {str(e)}')
+        raise
+
+
+# Define FastAPI app with the lifespan context manager
+app = FastAPI(lifespan=lifespan)
+
+floware_base_url = os.getenv('FLOWARE_BASE_URL', 'http://localhost:8001')
+
+
+def _middleware(cls: type[Any]) -> _MiddlewareFactory[Any]:
+    return cast(_MiddlewareFactory[Any], cls)
+
+
+OpenApiCallable = Callable[[], dict[str, Any]]
+
+
+def custom_openapi() -> dict[str, Any]:
+    """Custom OpenAPI schema with Bearer authentication"""
+    if app.openapi_schema:
+        return app.openapi_schema
+
+    openapi_schema = get_openapi(
+        title='Flo API',
+        version='1.0.0',
+        description='Floware Server - AI Middleware API',
+        routes=app.routes,
+        servers=[{'url': floware_base_url, 'description': 'floware server'}],
+    )
+
+    # Add Bearer authentication security scheme
+    # This matches the scheme_name in BearerAuth class
+    openapi_schema['components']['securitySchemes'] = {
+        'BearerAuth': {
+            'type': 'http',
+            'scheme': 'bearer',
+            'bearerFormat': 'JWT',
+            'description': 'Enter your JWT token',
+        }
+    }
+
+    # Apply security to all endpoints by default
+    # Individual endpoints can override this with dependencies=[]
+    # openapi_schema["security"] = [{"BearerAuth": []}]
+
+    app.openapi_schema = openapi_schema
+    return app.openapi_schema
+
+
+app.openapi = cast(OpenApiCallable, custom_openapi)  # type: ignore[assignment]
+
+
+@app.get('/v1/_metrics')
+async def metrics(request: Request):
+    logger.debug('Metrics endpoint called')
+    metrics_data = await PrometheusMiddleware.metrics_endpoint(request)
+    return metrics_data
+
+
+# Add middleware setup
+
+app.add_middleware(_middleware(RequestIdMiddleware))
+app.add_middleware(_middleware(RequireAuthMiddleware))
+app.add_middleware(_middleware(PrometheusMiddleware))
+app.add_middleware(_middleware(SecurityHeadersMiddleware))  # disable locally if the CSP blocks Swagger UI assets
+
+origins = os.getenv('ALLOWED_ORIGINS', 'http://localhost:5173')
+allowed_origins = origins.split(',')
+
+# Configure CORS with proper security settings
+app.add_middleware(
+    _middleware(CORSMiddleware),
+    allow_origins=allowed_origins,
+    allow_credentials=True,
+    allow_methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'],
+    allow_headers=['*'],
+    expose_headers=[
+        'X-Content-Type-Options',
+        'X-XSS-Protection',
+        'X-Frame-Options',
+        'Referrer-Policy',
+        'Content-Security-Policy',
+        'Pragma',
+        'Expires',
+        'Strict-Transport-Security',
+        'Cache-Control',
+    ],
+)
+
+# Include routers
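+# All feature routers below are mounted under the shared '/floware' prefix, so a
+# router path like '/notification' is served at '/floware/notification'.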
+app.include_router(notification_router, prefix='/floware')
+app.include_router(user_management_router, prefix='/floware')
+app.include_router(superset_controller, prefix='/floware')
+app.include_router(insights_router, prefix='/floware')
+app.include_router(knowledge_base_router, prefix='/floware')
+app.include_router(kb_document_router, prefix='/floware')
+app.include_router(rag_retrieval_router, prefix='/floware')
+app.include_router(gold_router, prefix='/floware')
+app.include_router(subscription_controller, prefix='/floware')
+app.include_router(datasource_router, prefix='/floware')
+app.include_router(hmac_router, prefix='/floware')
+app.include_router(authenticator_router, prefix='/floware')
+app.include_router(config_router, prefix='/floware')
+app.include_router(product_analysis_router, prefix='/floware')
+app.include_router(agents_router, prefix='/floware')
+app.include_router(namespace_router, prefix='/floware')
+app.include_router(workflows_router, prefix='/floware')
+app.include_router(workflow_pipeline_router, prefix='/floware')
+app.include_router(workflow_runs_router, prefix='/floware')
+app.include_router(inference_router, prefix='/floware')
+
+app.include_router(llm_inference_config_router, prefix='/floware')
+app.include_router(inference_proxy_router, prefix='/floware')
+app.include_router(image_search_router, prefix='/floware')
+app.include_router(tools_router, prefix='/floware')
+app.include_router(telephony_config_router, prefix='/floware')
+app.include_router(tts_config_router, prefix='/floware')
+app.include_router(stt_config_router, prefix='/floware')
+app.include_router(voice_agent_router, prefix='/floware')
+app.include_router(message_processor_router, prefix='/floware')
+
+
+@app.exception_handler(Exception)
+async def global_exception_handler(request: Request, exc: Exception):
+    # Skip HTTPExceptions (they're handled by FastAPI)
+    if isinstance(exc, HTTPException):
+        raise exc
+
+    prometheus_middleware = PrometheusMiddleware.get_instance()
+    if prometheus_middleware:
+        labels = prometheus_middleware.get_labels(request)
+        prometheus_middleware.http_errors_total.labels(**labels, status_code=500).inc()
+
+    error_message = 'An unexpected error has occurred while performing this action, please try again'
+    if environment != 'production':
+        error_message += f' - {str(exc)}'
+
+    request_id = getattr(request.state, 'request_id', get_current_request_id())
+    logger.error(f'Error in API call [Request ID: {request_id}]: {exc}', exc_info=True)
+
+    exception_response_formatter = ResponseFormatter()
+    return JSONResponse(
+        status_code=500,
+        content=exception_response_formatter.buildErrorResponse(error=error_message),
+    )
+
+
+# Wire dependency injection
+application_container.wire(modules=[__name__], packages=['floware.controllers'])
+
+db_repo_container.wire(
+    modules=[__name__],
+    packages=[
+        'product_analysis_module.product_analysis_service',
+        'image_search_module.services',
+    ],
+)
+
+
+product_analysis_container.wire(
+    modules=[__name__],
+    packages=['product_analysis_module.controllers'],
+)
+
+user_module_container.wire(
+    modules=[__name__],
+    packages=[
+        'auth_module.controllers',
+        'plugins_module.controllers',
+        'insights_module.controllers',
+        'user_management_module.controllers',
+        'user_management_module.authorization',
+    ],
+)
+
+auth_container.wire(
+    modules=[__name__],
+    packages=[
+        'auth_module.controllers',
+        'user_management_module.authorization',
+        'user_management_module.controllers',
+
'insights_module.controllers', + 'plugins_module.services', + 'plugins_module.controllers', + 'llm_inference_config_module.controllers', + ], +) + +insights_container.wire( + modules=[__name__], + packages=['insights_module.controllers'], +) + +gold_container.wire( + modules=[__name__], + packages=['gold_module.controllers'], +) + +common_container.wire( + modules=[__name__], + packages=[ + 'auth_module.controllers', + 'user_management_module.controllers', + 'user_management_module.authorization', + 'insights_module.controllers', + 'floware.controllers', + 'knowledge_base_module.controllers', + 'gold_module.controllers', + 'plugins_module.controllers', + 'plugins_module.services', + 'product_analysis_module.controllers', + 'agents_module.controllers', + 'agents_module.services', + 'inference_module.controllers', + 'llm_inference_config_module.controllers', + 'tools_module.controllers', + 'image_search_module.controllers', + 'voice_agents_module.controllers', + ], +) + +email_rag_container.wire( + modules=[__name__], + packages=[ + 'knowledge_base_module.controllers', + 'auth_module.controllers', + 'inference_module.controllers', + ], +) + +plugins_container.wire( + modules=[__name__], + packages=[ + 'plugins_module.controllers', + 'plugins_module.services', + 'user_management_module.controllers', + 'user_management_module.authorization', + 'tools_module.datasources', + ], +) + +agents_container.wire( + modules=[__name__], + packages=[ + 'agents_module.controllers', + 'agents_module.services', + ], +) + +inference_container.wire( + modules=[__name__], + packages=['inference_module.controllers'], +) + +llm_inference_config_container.wire( + modules=[__name__], + packages=[ + 'llm_inference_config_module.controllers', + 'agents_module.controllers', + 'knowledge_base_module.controllers', + ], +) + +tools_container.wire( + modules=[__name__], + packages=[ + 'tools_module.controllers', + ], +) + +image_search_container.wire( + modules=[__name__], + packages=['image_search_module.controllers'], +) + +api_services_container.wire( + modules=[__name__], + packages=['api_services_module.core'], +) + +voice_agents_container.wire( + modules=[__name__], + packages=[ + 'voice_agents_module.controllers', + 'voice_agents_module.services', + ], +) + +environment = os.getenv('APP_ENV', 'dev') + +# Running with Uvicorn (for local development) +if __name__ == '__main__': + print(f'Starting application in environment: {environment}') + if environment == 'production': + uvicorn.run( + 'server:app', + host='0.0.0.0', + port=8001, + workers=4, + log_level='critical', + forwarded_allow_ips='*', + ) + else: + dirs = glob.glob('../../..//**/*_module/**', recursive=True) + dirs.extend(glob.glob('../../..//**/plugins/**', recursive=True)) + dirs.extend(glob.glob('../../..//**/packages/**', recursive=True)) + dirs.append('../../floware') + + uvicorn.run( + 'server:app', + host='0.0.0.0', + port=8001, + workers=1, + reload=True, + reload_includes=dirs, + log_level='info', + forwarded_allow_ips='*', + ) diff --git a/wavefront/server/apps/floware/floware/services/config_service.py b/wavefront/server/apps/floware/floware/services/config_service.py new file mode 100644 index 00000000..901774da --- /dev/null +++ b/wavefront/server/apps/floware/floware/services/config_service.py @@ -0,0 +1,81 @@ +from typing import Any + +from db_repo_module.models.config import Config +from fastapi import UploadFile, File, HTTPException +from flo_cloud.cloud_storage import CloudStorageManager +from 
db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+
+
+class ConfigService:
+    def __init__(
+        self,
+        config_repository: SQLAlchemyRepository[Config],
+        cloud_manager: CloudStorageManager,
+        config: dict[str, Any],
+    ) -> None:
+        self.config_repository = config_repository
+        self.cloud_manager = cloud_manager
+        self.config = config
+
+    def _get_gcp_credentials(self) -> dict[str, Any]:
+        config_credentials = self.config.get('gcp')
+        if not isinstance(config_credentials, dict):
+            raise HTTPException(status_code=500, detail='GCP configuration is missing')
+        if not config_credentials.get(
+            'gcp_asset_storage_bucket'
+        ) or not config_credentials.get('config_file_name'):
+            raise HTTPException(status_code=500, detail='Incomplete GCP configuration')
+        return config_credentials
+
+    async def store_app_config(
+        self,
+        file: UploadFile | None = None,
+        app_config: dict[str, Any] | None = None,
+    ):
+        # Note: `file` is left as None when no upload is provided; wrapping it
+        # in fastapi's File(None) marker would break the .read() call below.
+        config_credentials = self._get_gcp_credentials()
+        if file and file.content_type not in ['image/png', 'image/jpeg', 'image/jpg']:
+            raise HTTPException(status_code=400, detail='Invalid file type')
+        file_size = getattr(file, 'size', None)
+        if file_size is not None and file_size > 1024 * 1024 * 1:  # 1MB
+            raise HTTPException(status_code=400, detail='File size is too large')
+
+        file_content = await file.read() if file else None
+        if file_content:
+            self.cloud_manager.save_small_file(
+                file_content,
+                config_credentials['gcp_asset_storage_bucket'],
+                config_credentials['config_file_name'],
+            )
+        # Allow saving the app_config only if an icon already exists or a new
+        # file upload is present
+        config_data = await self.config_repository.find(key='app_config')
+        if (config_data and config_data[0].value.get('app_icon')) or file_content:
+            # Save the config to the database
+            await self.config_repository.upsert(
+                filters={'key': 'app_config'},
+                value={
+                    'app_icon': config_credentials['config_file_name'],
+                    'app_config': app_config if app_config else {},
+                },
+            )
+        else:
+            raise HTTPException(status_code=400, detail='App icon is not set')
+
+    async def get_app_config(self):
+        config_record = await self.config_repository.find(key='app_config')
+        # Check whether the config_record is empty
+        if not config_record:
+            return None, None
+        config_path = config_record[0].value.get('app_icon')
+        config_credentials = self._get_gcp_credentials()
+        # Generate a new presigned URL
+        url = self.cloud_manager.generate_presigned_url(
+            config_credentials['gcp_asset_storage_bucket'],
+            config_path,
+            'get',
+        )
+        # Fetch the app_config from the database record
+        app_config = config_record[0].value.get('app_config', {})
+
+        return url, app_config
diff --git a/wavefront/server/apps/floware/floware/services/notification_service.py b/wavefront/server/apps/floware/floware/services/notification_service.py
new file mode 100644
index 00000000..3590ea52
--- /dev/null
+++ b/wavefront/server/apps/floware/floware/services/notification_service.py
@@ -0,0 +1,31 @@
+from db_repo_module.models.notification_users import NotificationUser
+from db_repo_module.models.notifications import Notification
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+
+
+class NotificationService:
+    def __init__(
+        self,
+        notification_repository: SQLAlchemyRepository[Notification],
+        notification_user_repository: SQLAlchemyRepository[NotificationUser],
+    ):
+        self.notification_repository = notification_repository
+        self.notification_user_repository = notification_user_repository
+
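+    # Illustrative shape of one row returned by fetch_notification below
+    # (keys mirror the SELECT; exact value types depend on the repository):
+    #   {'notification_id': ..., 'type': ..., 'title': ..., 'created_at': ...,
+    #    'updated_at': ..., 'user_id': ..., 'seen': ...}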
+
+    async def fetch_notification(self, user_id):
+        query = """
+            SELECT n.id as notification_id,
+                n.type,
+                n.title,
+                n.created_at,
+                n.updated_at,
+                nu.user_id,
+                nu.seen
+            FROM notification n LEFT JOIN notification_user nu ON n.id = nu.notification_id
+            AND nu.user_id = :user_id
+            ORDER BY n.updated_at DESC
+        """
+        result = await self.notification_repository.execute_query(
+            query, params={'user_id': user_id}
+        )
+        return result
diff --git a/wavefront/server/apps/floware/floware/utils/network_utils.py b/wavefront/server/apps/floware/floware/utils/network_utils.py
new file mode 100644
index 00000000..5b655293
--- /dev/null
+++ b/wavefront/server/apps/floware/floware/utils/network_utils.py
@@ -0,0 +1,7 @@
+import requests
+
+
+class NetworkUtils:
+    @staticmethod
+    def http_get(url: str, headers: dict):
+        # Explicit timeout so calls cannot hang indefinitely (30s is an assumed default)
+        response = requests.get(url=url, headers=headers, timeout=30)
+        return response.json()
diff --git a/wavefront/server/apps/floware/floware/utils/yaml.py b/wavefront/server/apps/floware/floware/utils/yaml.py
new file mode 100644
index 00000000..a6e79817
--- /dev/null
+++ b/wavefront/server/apps/floware/floware/utils/yaml.py
@@ -0,0 +1,10 @@
+import os
+
+ROOT_DIR = os.getcwd()
+
+
+def read_yaml_as_string(file_path: str) -> str:
+    config_path = os.path.join(ROOT_DIR, file_path.lstrip('/'))
+    with open(config_path, 'r') as file:
+        yaml_content = file.read()
+    return yaml_content
diff --git a/wavefront/server/apps/floware/pyproject.toml b/wavefront/server/apps/floware/pyproject.toml
new file mode 100644
index 00000000..834c5c2d
--- /dev/null
+++ b/wavefront/server/apps/floware/pyproject.toml
@@ -0,0 +1,60 @@
+[project]
+name = "floware"
+version = "0.1.0"
+description = "AI middleware for rootflo apps"
+authors = [
+    { name = "rootflo engineering", email = "engineering@rootflo.ai" }
+]
+requires-python = ">=3.11"
+
+dependencies = [
+    "auth-module",
+    "common-module",
+    "db-repo-module",
+    "gold-module",
+    "insights-module",
+    "knowledge-base-module",
+    "user-management-module",
+    "plugins-module",
+    "product-analysis-module",
+    "agents-module",
+    "inference-module",
+    "llm-inference-config-module",
+    "tools-module",
+    "image-search-module",
+    "api-services-module",
+    "voice-agents-module",
+
+    "fastapi>=0.115.2,<1.0.0",
+    "uvicorn>=0.30.1,<1.0.0",
+    "dependency-injector>=4.42.0,<5.0.0",
+    "python-multipart==0.0.9",
+    "python-dotenv>=1.1.0,<2.0.0",
+    "pottery>=3.0.1,<4.0.0"
+]
+
+[tool.uv.sources]
+auth-module = { workspace = true }
+common-module = { workspace = true }
+db-repo-module = { workspace = true }
+gold-module = { workspace = true }
+insights-module = { workspace = true }
+knowledge-base-module = { workspace = true }
+user-management-module = { workspace = true }
+plugins-module = { workspace = true }
+product-analysis-module = { workspace = true }
+agents-module = { workspace = true }
+inference-module = { workspace = true }
+llm-inference-config-module = { workspace = true }
+tools-module = { workspace = true }
+image-search-module = { workspace = true }
+api-services-module = { workspace = true }
+voice-agents-module = { workspace = true }
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["floware"]
diff --git a/wavefront/server/apps/floware/tests/conftest.py b/wavefront/server/apps/floware/tests/conftest.py
new file mode 100644
index 00000000..9b700cbe
--- /dev/null
+++ b/wavefront/server/apps/floware/tests/conftest.py
@@ -0,0 +1,285 @@
+import json
+from typing import Any, cast
+from unittest.mock import Mock
+from uuid import uuid4
+
+from
auth_module.auth_container import AuthContainer +from common_module.common_container import CommonContainer +from common_module.middleware.request_id_middleware import RequestIdMiddleware +from db_repo_module.database.base import Base +from db_repo_module.db_repo_container import DatabaseModuleContainer +from fastapi import FastAPI +from fastapi.testclient import TestClient +import pytest +from sqlalchemy.ext.asyncio import async_sessionmaker +from sqlalchemy.ext.asyncio import create_async_engine +import testing.postgresql +from user_management_module.authorization.require_auth import RequireAuthMiddleware +from user_management_module.user_container import UserContainer +from floware.controllers.config_controller import config_router +from floware.di.application_container import ApplicationContainer +from flo_cloud.cloud_storage import CloudStorageManager +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from db_repo_module.models.config import Config +from floware.services.config_service import ConfigService +from starlette.middleware import _MiddlewareFactory + + +class MockDbClient: + def __init__(self, engine, session_factory): + self._engine = engine + self.session = session_factory + + +def _middleware(cls: type[Any]) -> _MiddlewareFactory[Any]: + return cast(_MiddlewareFactory[Any], cls) + + +@pytest.fixture +async def test_engine(): + with testing.postgresql.Postgresql() as postgresql: + database_url = postgresql.url() + + async_database_url = database_url.replace( + 'postgresql://', 'postgresql+psycopg://' + ) + + engine = create_async_engine(async_database_url) + + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + yield engine + + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + await engine.dispose() + + +@pytest.fixture +async def test_session(test_engine): + async_session = async_sessionmaker(autocommit=False, bind=test_engine) + yield async_session + + +@pytest.fixture +def test_user_id(): + """Fixture to provide a consistent test user ID.""" + return str(uuid4()) + + +@pytest.fixture +def test_session_id(): + """Fixture to provide a consistent test session ID.""" + return str(uuid4()) + + +@pytest.fixture +def setup_containers(test_engine, test_session, test_user_id, test_session_id): + db_repo_container = DatabaseModuleContainer() + mock_db_client = MockDbClient(test_engine, test_session) + db_repo_container.db_client.override(mock_db_client) + user_container = UserContainer() + + common_container = CommonContainer() + + cache_manager_mock = Mock() + # For session data + cache_manager_mock.get_str.return_value = json.dumps( + {'user_id': test_user_id, 'device_info': 'Mozilla/5.0'} + ) + # For reset password + cache_manager_mock.get_str.side_effect = ( + lambda key: test_user_id + if key == 'mock_reset_code' + else json.dumps({'user_id': test_user_id, 'device_info': 'Mozilla/5.0'}) + ) + cache_manager_mock.add = Mock() + common_container.cache_manager.override(cache_manager_mock) + + user_container.db_client.override(mock_db_client) + user_container.cache_manager.override(cache_manager_mock) + + # Mock token service + mock_token_service = Mock() + mock_token_service.create_token.return_value = 'mock_token' + mock_token_service.decode_token.return_value = { + 'sub': 'test@example.com', + 'user_id': test_user_id, + 'role_id': 'test_role_id', + 'session_id': test_session_id, + 'code': 'mock_reset_code', + } + mock_token_service.token_expiry = 3600 + 
mock_token_service.temporary_token_expiry = 600 + + auth_container = AuthContainer( + db_client=db_repo_container.db_client, + cache_manager=cache_manager_mock, + ) + auth_container.token_service.override(mock_token_service) + + # mocking auth container superset_service + mock_superset_service = Mock() + mock_superset_service.generate_guest_token.return_value = 'mock_guest_token' + auth_container.superset_service.override(mock_superset_service) + + user_container.wire( + packages=[ + 'user_management_module.authorization', + 'user_management_module.utils', + 'auth_module.controllers', + ] + ) + + common_container.wire( + packages=[ + 'auth_module.controllers', + 'user_management_module.authorization', + 'floware.controllers', + ] + ) + auth_container.wire( + packages=[ + 'user_management_module.authorization', + ] + ) + + yield auth_container, common_container + auth_container.unwire() + common_container.unwire() + + db_repo_container.unwire() + + +@pytest.fixture +def mock_cloud_manager(): + """Mock CloudStorageManager for testing""" + mock_cloud = Mock(spec=CloudStorageManager) + mock_cloud.save_small_file = Mock() + mock_cloud.generate_presigned_url = Mock( + return_value='https://mock-presigned-url.com/config.png' + ) + return mock_cloud + + +@pytest.fixture +def mock_config_repository(): + """Mock config repository for testing""" + mock_repo = Mock(spec=SQLAlchemyRepository[Config]) + mock_repo.upsert = Mock() + mock_repo.find = Mock(return_value=[Mock(value={'app_icon': 'config.png'})]) + return mock_repo + + +@pytest.fixture +def mock_config(): + """Mock configuration for testing""" + return { + 'gcp': { + 'gcp_asset_storage_bucket': 'test-bucket', + 'config_file_name': 'config.png', + } + } + + +@pytest.fixture +def mock_config_service(): + """Mock config service for testing""" + mock_service = Mock(spec=ConfigService) + + # Mock async methods + async def mock_store_app_config(file, app_config_dict): + return None + + async def mock_get_app_config(): + return 'https://test-bucket.com/config.png', {} + + mock_service.store_app_config = mock_store_app_config + mock_service.get_app_config = mock_get_app_config + + return mock_service + + +@pytest.fixture +def setup_application_container( + mock_cloud_manager, mock_config_repository, mock_config, mock_config_service +): + """Setup ApplicationContainer with mocked dependencies""" + app_container = ApplicationContainer() + + # Override the dependencies + app_container.cloud_manager.override(mock_cloud_manager) + app_container.config_repository.override(mock_config_repository) + app_container.config.override(mock_config) + app_container.config_service.override(mock_config_service) + + # Wire the container + app_container.wire(modules=[__name__], packages=['floware.controllers']) + + yield app_container + + # Cleanup + app_container.unwire() + + +@pytest.fixture +def test_client(setup_containers, setup_application_container): + app = FastAPI() + app.add_middleware(_middleware(RequestIdMiddleware)) + app.add_middleware(_middleware(RequireAuthMiddleware)) + app.include_router(config_router, prefix='/floware') + return TestClient(app) + + +@pytest.fixture +def mock_auth_functions(monkeypatch): + async def mock_get_current_user(request): + return 'test_user_id', 'test_role_id', 'test_session_id' + + async def mock_check_is_admin(role_id): + return True + + monkeypatch.setattr( + 'user_management_module.controllers.user_controller.get_current_user', + mock_get_current_user, + ) + + +@pytest.fixture +def mock_admin_functions(monkeypatch): + 
"""Mock check_is_admin to return True for admin tests""" + + async def mock_check_is_admin(role_id, role_repository=None): + return True + + monkeypatch.setattr( + 'floware.controllers.config_controller.check_is_admin', + mock_check_is_admin, + ) + + +@pytest.fixture +def mock_non_admin_functions(monkeypatch): + """Mock check_is_admin to return False for non-admin tests""" + + async def mock_check_is_admin(role_id, role_repository=None): + return False + + monkeypatch.setattr( + 'floware.controllers.config_controller.check_is_admin', + mock_check_is_admin, + ) + + +@pytest.fixture +def auth_token(setup_containers, test_user_id, test_session_id): + auth_container, _ = setup_containers + token_service = auth_container.token_service() + token = token_service.create_token( + sub='test@example.com', + user_id=test_user_id, + role_id='test_role_id', + session_id=test_session_id, + ) + return token diff --git a/wavefront/server/apps/floware/tests/test_config_controller.py b/wavefront/server/apps/floware/tests/test_config_controller.py new file mode 100644 index 00000000..d4fd0962 --- /dev/null +++ b/wavefront/server/apps/floware/tests/test_config_controller.py @@ -0,0 +1,120 @@ +from db_repo_module.models.session import Session +from db_repo_module.models.user import User +import pytest +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker + + +async def create_session( + test_session: async_sessionmaker[AsyncSession], + test_user_id, + test_session_id, +): + user = User( + id=test_user_id, + email='test@example.com', + password='hashed_password', + first_name='Test', + last_name='User', + ) + + # Create a session in the database + db_session = Session( + id=test_session_id, user_id=test_user_id, device_info='test_device' + ) + + async with test_session() as session: + session.add(user) + session.add(db_session) + await session.commit() + + +@pytest.mark.asyncio +async def test_setting_up_credentials_admin( + test_client, + test_session: async_sessionmaker[AsyncSession], + test_user_id, + test_session_id, + auth_token, + mock_admin_functions, +): + await create_session(test_session, test_user_id, test_session_id) + + response = test_client.put( + '/floware/v1/settings/config/app-icon', + headers={'Authorization': f'Bearer {auth_token}'}, + data={'app_config': '{"width":"100px","height":"50px"}'}, + files={'file': open('apps/floware/tests/test.png', 'rb')}, + ) + assert response.status_code == 200 + + +@pytest.mark.asyncio +async def test_with_no_admin( + test_client, + test_session: async_sessionmaker[AsyncSession], + test_user_id, + test_session_id, + auth_token, + mock_non_admin_functions, +): + await create_session(test_session, test_user_id, test_session_id) + + response = test_client.put( + '/floware/v1/settings/config/app-icon', + headers={'Authorization': f'Bearer {auth_token}'}, + data={'app_config': '{"width":"100px","height":"50px"}'}, + files={'file': open('apps/floware/tests/test.png', 'rb')}, + ) + print(response.json()) + assert response.status_code == 401 + + +@pytest.mark.asyncio +async def test_without_authorization_header( + test_client, + test_session: async_sessionmaker[AsyncSession], + test_user_id, + test_session_id, +): + await create_session(test_session, test_user_id, test_session_id) + + response = test_client.put( + '/floware/v1/settings/config/app-icon', + files={'file': open('apps/floware/tests/test.png', 'rb')}, + ) + assert response.status_code == 401 + + +# write test for get config +@pytest.mark.asyncio +async def test_get_config( + test_client, + 
test_session: async_sessionmaker[AsyncSession], + test_user_id, + test_session_id, + auth_token, + mock_admin_functions, +): + await create_session(test_session, test_user_id, test_session_id) + + response = test_client.get( + '/floware/v1/settings/config', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 200 + + +# write test for get config without authorization header +@pytest.mark.asyncio +async def test_get_config_without_authorization_header( + test_client, + test_session: async_sessionmaker[AsyncSession], + test_user_id, + test_session_id, +): + await create_session(test_session, test_user_id, test_session_id) + + response = test_client.get( + '/floware/v1/settings/config', + ) + assert response.status_code == 200 diff --git a/wavefront/server/apps/inference_app/inference_app/__init__.py b/wavefront/server/apps/inference_app/inference_app/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/apps/inference_app/inference_app/config.ini b/wavefront/server/apps/inference_app/inference_app/config.ini new file mode 100644 index 00000000..cc12d9df --- /dev/null +++ b/wavefront/server/apps/inference_app/inference_app/config.ini @@ -0,0 +1,8 @@ +[aws] +model_storage_bucket=${MODEL_STORAGE_BUCKET} + +[gcp] +model_storage_bucket=${MODEL_STORAGE_BUCKET} + +[cloud_config] +cloud_provider=${CLOUD_PROVIDER} diff --git a/wavefront/server/apps/inference_app/inference_app/controllers/__init__.py b/wavefront/server/apps/inference_app/inference_app/controllers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/apps/inference_app/inference_app/controllers/inference_controller.py b/wavefront/server/apps/inference_app/inference_app/controllers/inference_controller.py new file mode 100644 index 00000000..067b0350 --- /dev/null +++ b/wavefront/server/apps/inference_app/inference_app/controllers/inference_controller.py @@ -0,0 +1,187 @@ +import base64 +from typing import Any, Dict + +from common_module.common_container import CommonContainer +from common_module.log.logger import logger +from common_module.response_formatter import ResponseFormatter +from dependency_injector.wiring import Provide, inject +from fastapi import APIRouter, Depends, status +from fastapi.responses import JSONResponse +from inference_app.inference_app_container import InferenceAppContainer +from inference_app.service.image_analyser import ImageClarityService +from inference_app.service.model_inference import ( + ModelInferenceService, + PreprocessingStep, +) +from inference_app.service.model_repository import ModelRepository +from inference_app.service.image_embedding import ImageEmbedding +from pydantic import BaseModel, Field + + +class InferencePayload(BaseModel): + data: str + payload_type: str + model_info: dict + preprocessing_steps: list[PreprocessingStep] + max_expected_variance: int = Field(default=1000) + resize_width: int = Field(default=224) + resize_height: int = Field(default=224) + gaussian_blur_kernel: int = Field(default=3) + min_threshold: int = Field(default=50) + max_threshold: int = Field(default=150) + normalize_mean: str = Field(default='0.485,0.456,0.406') + normalize_std: str = Field(default='0.229,0.224,0.225') + + +class InferenceResult(BaseModel): + results: Dict[str, Any] = Field(..., description='Dictionary of inference results') + + +class ImagePayload(BaseModel): + image_data: str + + +inference_app_router = APIRouter() + + +async def handle_database_error(session, error_msg: str, error) -> 
JSONResponse:
+    """Handle database errors and return appropriate response."""
+    await session.rollback()
+    logger.error(f'{error_msg}: {str(error)}')
+    return JSONResponse(
+        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+        content=ResponseFormatter.buildErrorResponse(str(error)),
+    )
+
+
+@inference_app_router.post('/v1/model-repository/model/{model_id}/infer')
+@inject
+async def generic_inference_handler(
+    model_id: str,  # path parameter from the route, used for logging below
+    payload: InferencePayload,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    model_repository: ModelRepository = Depends(
+        Provide[InferenceAppContainer.model_repository]
+    ),
+    image_analyser: ImageClarityService = Depends(
+        Provide[InferenceAppContainer.image_analyser]
+    ),
+    config: dict = Depends(Provide[InferenceAppContainer.config]),
+    model_inference: ModelInferenceService = Depends(
+        Provide[InferenceAppContainer.model_inference]
+    ),
+):
+    try:
+        provider = config['cloud_config']['cloud_provider']
+        model_storage_bucket = (
+            config['gcp']['model_storage_bucket']
+            if provider.lower() == 'gcp'
+            else config['aws']['model_storage_bucket']
+        )
+
+        logger.info(
+            f'Loading model from bucket: {model_storage_bucket}, model_info: {payload.model_info}'
+        )
+        model = await model_repository.load_model(
+            model_info=payload.model_info, bucket_name=model_storage_bucket
+        )
+        logger.debug(f'Model loaded successfully for model {model_id}')
+
+        if payload.payload_type.lower() == 'image':
+            base64_data_uri = payload.data
+            parts = base64_data_uri.split(',')
+            if len(parts) == 2:
+                base64_data = parts[1]
+                image_bytes = base64.b64decode(base64_data)
+
+                clarity_score = image_analyser.laplacian_detection(
+                    image_bytes, payload.max_expected_variance
+                )
+
+                infer_data = model_inference.model_infer_score(
+                    model,
+                    image_bytes,
+                    payload.resize_width,
+                    payload.resize_height,
+                    payload.normalize_mean,
+                    payload.normalize_std,
+                    payload.gaussian_blur_kernel,
+                    payload.min_threshold,
+                    payload.max_threshold,
+                    preprocessing_steps=payload.preprocessing_steps,
+                )
+                logger.debug(f'Model inference completed successfully for model {model_id}')
+
+                inference_results = InferenceResult(
+                    results={
+                        'clarity_score': clarity_score,
+                        'infer_data': infer_data,
+                        'data_type': payload.payload_type.lower(),
+                    }
+                )
+
+                logger.info(f'Inference request completed successfully for model {model_id}')
+                return JSONResponse(
+                    status_code=status.HTTP_201_CREATED,
+                    content=response_formatter.buildSuccessResponse(
+                        inference_results.dict()
+                    ),
+                )
+            else:
+                error_msg = (
+                    "Input data is not in expected Data URI format (missing 'base64,')."
+                )
+                logger.error(
+                    f"Expected Data URI format with 'base64,' prefix. "
+                    f'Data length: {len(base64_data_uri)}'
+                )
+                return JSONResponse(
+                    status_code=status.HTTP_400_BAD_REQUEST,
+                    content=response_formatter.buildErrorResponse(error_msg),
+                )
+        else:
+            error_msg = f"Invalid payload_type: {payload.payload_type}. Accepted values are 'image'"
+            logger.error(error_msg)
+            return JSONResponse(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                content=response_formatter.buildErrorResponse(
+                    'Invalid payload_type.
+
+
+@inference_app_router.post('/v1/query/embeddings')
+@inject
+async def image_embedding(
+    payload: ImagePayload,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    image_embedding_service: ImageEmbedding = Depends(
+        Provide[InferenceAppContainer.image_embedding]
+    ),
+):
+    # Decode the Base64 string (with or without a Data URI prefix)
+    base64_data_uri = payload.image_data
+    parts = base64_data_uri.split(',')
+    base64_data = parts[1] if len(parts) == 2 else parts[0]
+    image_data = base64.b64decode(base64_data)
+    embeddings = image_embedding_service.query_embed(image_data)
+    if not embeddings:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                'No embedding data is present'
+            ),
+        )
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(data={'response': embeddings}),
+    )
diff --git a/wavefront/server/apps/inference_app/inference_app/inference_app_container.py b/wavefront/server/apps/inference_app/inference_app/inference_app_container.py
new file mode 100644
index 00000000..c7eb3055
--- /dev/null
+++ b/wavefront/server/apps/inference_app/inference_app/inference_app_container.py
@@ -0,0 +1,31 @@
+from dependency_injector import containers
+from dependency_injector import providers
+from inference_app.service.image_analyser import ImageClarityService
+from flo_cloud.cloud_storage import CloudStorageManager
+from inference_app.service.model_repository import ModelRepository
+from inference_app.service.model_inference import ModelInferenceService
+from inference_app.service.image_embedding import ImageEmbedding
+
+
+class InferenceAppContainer(containers.DeclarativeContainer):
+    config = providers.Configuration(ini_files=['config.ini'])
+    cache_manager = providers.Dependency()
+
+    cloud_storage_manager = providers.Singleton(
+        CloudStorageManager, provider=config.cloud_config.cloud_provider
+    )
+
+    model_repository = providers.Singleton(
+        ModelRepository,
+        cloud_storage_manager=cloud_storage_manager,
+    )
+
+    model_inference = providers.Factory(ModelInferenceService)
+
+    image_analyser = providers.Factory(
+        ImageClarityService,
+    )
+
+    image_embedding = providers.Factory(
+        ImageEmbedding,
+    )
diff --git a/wavefront/server/apps/inference_app/inference_app/server.py b/wavefront/server/apps/inference_app/inference_app/server.py
new file mode 100644
index 00000000..10274e65
--- /dev/null
+++ b/wavefront/server/apps/inference_app/inference_app/server.py
@@ -0,0 +1,120 @@
+import glob
+import os
+
+from dotenv import load_dotenv
+from fastapi import FastAPI
+from fastapi.middleware.cors import CORSMiddleware
+import uvicorn
+
+# ruff: noqa: E402
+load_dotenv()
+
+from common_module.common_container import CommonContainer
+from common_module.log.logger import logger
+from common_module.response_formatter import ResponseFormatter
+from common_module.middleware.request_id_middleware import RequestIdMiddleware
+from fastapi import HTTPException
+from fastapi import Request
+from fastapi.responses import JSONResponse
+
+from inference_app.inference_app_container import InferenceAppContainer
+from inference_app.controllers.inference_controller import inference_app_router
+
+# Initialize dependency containers
+common_container = CommonContainer(cache_manager=None)
+inference_app_container = InferenceAppContainer(
+    cache_manager=None,
+)
+
+environment = os.getenv('APP_ENV', 'dev')
+
+app = FastAPI(
+    title='Inference API',
+    description='Inference application for PyTorch models',
+    version='1.0.0',
+)
+
+
+origins = os.getenv('ALLOWED_ORIGINS', 'http://localhost:5173')
+allowed_origins = origins.split(',')
+
+app.add_middleware(RequestIdMiddleware)
+# Configure CORS with proper security settings
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=allowed_origins,
+    allow_credentials=True,
+    allow_methods=['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'],
+    allow_headers=['*'],
+    expose_headers=[
+        'X-Content-Type-Options',
+        'X-XSS-Protection',
+        'X-Frame-Options',
+        'Referrer-Policy',
+        'Content-Security-Policy',
+        'Pragma',
+        'Expires',
+        'Strict-Transport-Security',
+        'Cache-Control',
+    ],
+)
+
+# Include routers
+app.include_router(inference_app_router, prefix='/inference')
+
+
+@app.get('/inference/v1/health')
+async def health_check():
+    return JSONResponse(content={'status': 'ok'}, status_code=200)
+
+
+@app.exception_handler(Exception)
+async def global_exception_handler(request: Request, exc: Exception):
+    # Skip HTTPExceptions (they're handled by FastAPI)
+    if isinstance(exc, HTTPException):
+        raise exc
+
+    error_message = 'An unexpected error has occurred while performing this action, please try again'
+    if environment != 'production':
+        error_message += f' - {str(exc)}'
+
+    logger.error(f'Error in API call: {exc}', exc_info=True)
+
+    exception_response_formatter = ResponseFormatter()
+    return JSONResponse(
+        status_code=500,
+        content=exception_response_formatter.buildErrorResponse(error=error_message),
+    )
+
+
+common_container.wire(
+    modules=[__name__],
+    packages=['inference_app.controllers'],
+)
+
+inference_app_container.wire(
+    modules=[__name__],
+    packages=['inference_app.controllers'],
+)
+
+
+# Running with Uvicorn (for local development)
+if __name__ == '__main__':
+    print(f'Starting application in environment: {environment}')
+    if environment == 'production':
+        uvicorn.run(
+            'server:app', host='0.0.0.0', port=8003, workers=1, log_level='critical'
+        )
+    else:
+        dirs = glob.glob('apps/inference_app/inference_app/**/*.py', recursive=True)
+
+        uvicorn.run(
+            'server:app',
+            host='0.0.0.0',
+            port=8003,
+            workers=1,
+            reload=True,
+            reload_includes=dirs,
+            log_level='info',
+        )
diff --git a/wavefront/server/apps/inference_app/inference_app/service/__init__.py b/wavefront/server/apps/inference_app/inference_app/service/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wavefront/server/apps/inference_app/inference_app/service/image_analyser.py b/wavefront/server/apps/inference_app/inference_app/service/image_analyser.py
new file mode 100644
index 00000000..e7a89f0a
--- /dev/null
+++ b/wavefront/server/apps/inference_app/inference_app/service/image_analyser.py
@@ -0,0 +1,22 @@
+import cv2
+from common_module.log.logger import logger
+from inference_app.utils.image_utils import decode_image_from_bytes
+
+
+class ImageClarityService:
+    def __init__(self):
+        pass
+
+    def laplacian_detection(self, image_bytes, max_expected_variance):
+        # Decode image from the raw bytes array
+        logger.info(
+            f'Analysing image clarity for a payload of {len(image_bytes)} bytes.'
+        )
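+        # Variance of the Laplacian is a standard focus measure: blurry
+        # images carry little high-frequency content, so the variance is low.
+        # The score below maps the variance linearly onto 0-100, clamped at
+        # max_expected_variance (illustrative: variance 500 with a maximum
+        # of 1000 yields a clarity score of 50).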
+        images = decode_image_from_bytes(image_bytes)
+        images = cv2.resize(images, (256, 256), interpolation=cv2.INTER_AREA)
+        gray = cv2.cvtColor(images, cv2.COLOR_BGR2GRAY)
+        laplacian = cv2.Laplacian(gray, cv2.CV_64F)
+        variance = laplacian.var()
+        clamped_variance = min(variance, int(max_expected_variance))
+        score = (clamped_variance / int(max_expected_variance)) * 100
+        return int(score)
diff --git a/wavefront/server/apps/inference_app/inference_app/service/image_embedding.py b/wavefront/server/apps/inference_app/inference_app/service/image_embedding.py
new file mode 100644
index 00000000..106eba37
--- /dev/null
+++ b/wavefront/server/apps/inference_app/inference_app/service/image_embedding.py
@@ -0,0 +1,71 @@
+import torch
+from transformers import CLIPProcessor, CLIPModel, AutoImageProcessor, AutoModel
+from PIL import Image
+import io
+from typing import List, Dict, Any
+from common_module.log.logger import logger
+
+
+class ImageEmbedding:
+    CLIP_MODEL_NAME = 'openai/clip-vit-base-patch32'
+    DINO_MODEL_NAME = 'facebook/dinov3-vitl16-pretrain-lvd1689m'
+
+    def __init__(self):
+        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
+        logger.info(f'Using device: {self.device}')
+
+        self.clip_processor = CLIPProcessor.from_pretrained(self.CLIP_MODEL_NAME)
+        self.clip_model = CLIPModel.from_pretrained(self.CLIP_MODEL_NAME).to(
+            self.device
+        )
+        self.clip_model.eval()
+
+        self.dino_processor = AutoImageProcessor.from_pretrained(self.DINO_MODEL_NAME)
+        self.dino_model = AutoModel.from_pretrained(
+            self.DINO_MODEL_NAME, trust_remote_code=True
+        ).to(self.device)
+        self.dino_model.eval()
+
+        self.embedders: Dict[str, Dict[str, Any]] = {
+            'clip': {
+                'processor': self.clip_processor,
+                'model': self.clip_model,
+                'extractor': self._extract_clip_features,
+            },
+            'dino': {
+                'processor': self.dino_processor,
+                'model': self.dino_model,
+                'extractor': self._extract_dino_features,
+            },
+        }
+
+    def _extract_clip_features(self, inputs: Dict[str, Any]) -> torch.Tensor:
+        return self.clip_model.get_image_features(**inputs)
+
+    def _extract_dino_features(self, inputs: Dict[str, Any]) -> torch.Tensor:
+        outputs = self.dino_model(**inputs)
+        return outputs.last_hidden_state[:, 0]
+
+    @torch.inference_mode()
+    def query_embed(self, image_content: bytes) -> List[Dict[str, List[float]]]:
+        try:
+            image = Image.open(io.BytesIO(image_content)).convert('RGB')
+        except Exception as e:
+            logger.error(f'Error opening image: {e}')
+            return []
+
+        results = []
+
+        for name, embedder in self.embedders.items():
+            inputs = embedder['processor'](images=image, return_tensors='pt')
+
+            inputs = {k: v.to(self.device) for k, v in inputs.items()}
+
+            image_features = embedder['extractor'](inputs)
+
+            image_features = image_features / image_features.norm(dim=-1, keepdim=True)
+            embedding = image_features.squeeze().cpu().numpy().tolist()
+
+            results.append({name: embedding})
+
+        return results
diff --git a/wavefront/server/apps/inference_app/inference_app/service/model_inference.py b/wavefront/server/apps/inference_app/inference_app/service/model_inference.py
new file mode 100644
index 00000000..929ccf3e
--- /dev/null
+++ b/wavefront/server/apps/inference_app/inference_app/service/model_inference.py
@@ -0,0 +1,99 @@
+import cv2
+import torchvision.transforms as transforms
+from PIL import Image
+import torch
+import numpy as np
+from pydantic import BaseModel, Field
+from inference_app.utils.image_utils import decode_image_from_bytes
+
+
+class PreprocessingStep(BaseModel):
+    preprocess_filter: str
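+    # preprocess_filter names a key in preprocessing_functions below, e.g.
+    # 'gray', 'gaussian_blur', 'canny' or 'kernel_sharpening'; `values` is
+    # reserved for per-filter arguments and is currently unused.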
+    values: list = Field(default_factory=list)
+
+
+class ModelInferenceService:
+    def __init__(self):
+        self.device = torch.device('cpu')
+
+    def preprocess_image(
+        self,
+        image_bytes,
+        gaussian_blur_kernel,
+        min_threshold,
+        max_threshold,
+        preprocessing_steps: list[PreprocessingStep],
+    ):
+        """Apply preprocessing steps based on provided flags."""
+        processed_image = decode_image_from_bytes(image_bytes)
+
+        # Define available preprocessing functions
+        preprocessing_functions = {
+            'gray': lambda img, values: cv2.cvtColor(img, cv2.COLOR_BGR2GRAY),
+            'gaussian_blur': lambda img, values: cv2.GaussianBlur(
+                img, (gaussian_blur_kernel, gaussian_blur_kernel), 0
+            ),
+            'canny': lambda img, values: cv2.cvtColor(
+                cv2.Canny(img, min_threshold, max_threshold), cv2.COLOR_GRAY2RGB
+            ),
+            'kernel_sharpening': lambda img, values: cv2.filter2D(
+                img, -1, np.array([[-1, -1, -1], [-1, 9, -1], [-1, -1, -1]])
+            ),
+        }
+        for step in preprocessing_steps:
+            filter_name = step.preprocess_filter
+            values = step.values
+            if filter_name and filter_name in preprocessing_functions:
+                processed_image = preprocessing_functions[filter_name](
+                    processed_image, values
+                )
+            else:
+                continue
+
+        pil_image = Image.fromarray(processed_image)
+        return pil_image
+
+    def model_infer_score(
+        self,
+        model,
+        image_bytes,
+        resize_width,
+        resize_height,
+        normalize_mean,
+        normalize_std,
+        gaussian_blur_kernel,
+        min_threshold,
+        max_threshold,
+        preprocessing_steps: list[PreprocessingStep],
+    ):
+        """
+        Predict overlap score for a single image using the same preprocessing as training
+        """
+        # Define the same transform used during validation
+        normalize_mean = [float(x) for x in normalize_mean.split(',')]
+        normalize_std = [float(x) for x in normalize_std.split(',')]
+        transform = transforms.Compose(
+            [
+                transforms.Resize((resize_height, resize_width)),
+                transforms.ToTensor(),
+                transforms.Normalize(mean=normalize_mean, std=normalize_std),
+            ]
+        )
+        # Apply the same preprocessing as during training
+        preprocessed_image = self.preprocess_image(
+            image_bytes,
+            gaussian_blur_kernel,
+            min_threshold,
+            max_threshold,
+            preprocessing_steps,
+        )
+
+        # Apply transforms
+        image_tensor = transform(preprocessed_image).unsqueeze(0).to(self.device)
+        model.to(self.device)
+        # Predict
+        model.eval()
+        with torch.no_grad():
+            response = model(image_tensor).item()
+
+        return response
diff --git a/wavefront/server/apps/inference_app/inference_app/service/model_repository.py b/wavefront/server/apps/inference_app/inference_app/service/model_repository.py
new file mode 100644
index 00000000..831dc8a0
--- /dev/null
+++ b/wavefront/server/apps/inference_app/inference_app/service/model_repository.py
@@ -0,0 +1,68 @@
+from typing import Dict
+import os
+import torch
+import io
+from common_module.log.logger import logger
+from flo_cloud.cloud_storage import CloudStorageManager
+
+
+class ModelRepository:
+    def __init__(
+        self,
+        cloud_storage_manager: CloudStorageManager,
+    ):
+        self.cloud_storage_manager = cloud_storage_manager
+        self.model_storage_dir = os.getenv('MODEL_STORAGE_DIR', './models')
+        os.makedirs(self.model_storage_dir, exist_ok=True)
+        # Cache for loaded models - stores model instances in memory
+        self._model_cache: Dict[str, torch.nn.Module] = {}
+
+    def _is_model_cached_locally(
+        self, model_name: str, file_path: str, expected_local_model_dir: str
+    ) -> bool:
+        """
+        Checks if the model is available in the local persistent storage.
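+
+        Example (illustrative): for model_name='clarity' and
+        file_path='models/clarity.pt', this looks for 'clarity.pt'
+        inside expected_local_model_dir.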
+ """ + return os.path.exists( + expected_local_model_dir + ) and f'{model_name}.{file_path.split(".")[-1]}' in os.listdir( + expected_local_model_dir + ) + + async def load_model(self, model_info: dict, bucket_name: str): + model_id = model_info['model_id'] + expected_local_model_dir = self.model_storage_dir + model_name = model_info['model_name'] + file_path = model_info['model_path'] + model_id = model_info['model_id'] + + local_model_filename = os.path.join( + expected_local_model_dir, f'{model_name}.{file_path.split(".")[1]}' + ) + local_model_full_path = os.path.join(local_model_filename) + + if self._is_model_cached_locally( + model_name, file_path, expected_local_model_dir + ): + logger.info(f'Model {model_id} found in local persistent storage, loading.') + if model_id in self._model_cache: + return self._model_cache[model_id] + else: + with open(local_model_full_path, 'rb') as f: + model_bytes_data = f.read() + return torch.load(io.BytesIO(model_bytes_data), weights_only=False) + else: + logger.info( + f'Model {model_id} not found in local persistent storage, loading from cloud storage.' + ) + model_bytes_data = self.cloud_storage_manager.read_file( + bucket_name, file_path + ) + model = torch.load(io.BytesIO(model_bytes_data), weights_only=False) + # Save to local persistent storage after fetching from cloud + os.makedirs(os.path.dirname(local_model_full_path), exist_ok=True) + with open(local_model_full_path, 'wb') as f: + f.write(model_bytes_data) + self._model_cache[model_id] = model + logger.info(f'Model {model_id} loaded and cached in memory.') + return model diff --git a/wavefront/server/apps/inference_app/inference_app/utils/__init__.py b/wavefront/server/apps/inference_app/inference_app/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/apps/inference_app/inference_app/utils/image_utils.py b/wavefront/server/apps/inference_app/inference_app/utils/image_utils.py new file mode 100644 index 00000000..f44494f1 --- /dev/null +++ b/wavefront/server/apps/inference_app/inference_app/utils/image_utils.py @@ -0,0 +1,34 @@ +import cv2 +import numpy as np +import io +from PIL import Image +from common_module.log.logger import logger + + +def decode_image_from_bytes(image_bytes: bytes): + """ + Decodes an image from bytes using OpenCV, with a Pillow fallback. + + Args: + image_bytes: The image data as bytes. + + Returns: + The decoded image as a NumPy array (OpenCV format). + + Raises: + ValueError: If the image could not be decoded. + """ + nparr = np.frombuffer(image_bytes, np.uint8) + image = cv2.imdecode(nparr, cv2.IMREAD_COLOR) + logger.info(f'Image decoding output is printed here: {image is not None}.') + if image is None: + try: + # Fallback to Pillow + img_pil = Image.open(io.BytesIO(image_bytes)) + # Convert PIL Image to an OpenCV compatible format + image = cv2.cvtColor(np.array(img_pil), cv2.COLOR_RGB2BGR) + logger.info(f'Pillow fallback successful. 
{image is not None}.') + except Exception as pil_e: + logger.error(f'Pillow (PIL) fallback also failed: {pil_e}') + raise ValueError('Could not decode image from bytes') + return image diff --git a/wavefront/server/apps/inference_app/pyproject.toml b/wavefront/server/apps/inference_app/pyproject.toml new file mode 100644 index 00000000..977aefc8 --- /dev/null +++ b/wavefront/server/apps/inference_app/pyproject.toml @@ -0,0 +1,36 @@ +[project] +name = "inference-app" +version = "0.1.0" +description = "Inference application for PyTorch models" +authors = [ + { name = "rootflo engineering", email = "engineering@rootflo.ai" } +] +requires-python = ">=3.11" + +dependencies = [ + "common-module", + "flo-cloud", + "fastapi>=0.115.2,<1.0.0", + "uvicorn>=0.30.1,<1.0.0", + "dependency-injector>=4.42.0,<5.0.0", + "python-multipart==0.0.9", + "python-dotenv>=1.1.0,<2.0.0", + "torchvision==0.16.0", + "opencv-python>=4.11.0.86", + "pillow>=11.1.0,<12", + "psycopg2>=2.9.10,<3.0.0", + "numpy>=1.26.4,<2.0.0", + "accelerate>=0.33.0,<1.0.0", + "transformers>=4.45.0" +] + +[tool.uv.sources] +common-module = { workspace = true } +flo-cloud = { workspace = true } + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["inference_app"] diff --git a/wavefront/server/background_jobs/rag_ingestion/pyproject.toml b/wavefront/server/background_jobs/rag_ingestion/pyproject.toml new file mode 100644 index 00000000..43048ada --- /dev/null +++ b/wavefront/server/background_jobs/rag_ingestion/pyproject.toml @@ -0,0 +1,34 @@ +[project] +name = "rag-ingestion" +version = "0.0.1" +description = "AI Insights from Voice" +authors = [ + {name = "rootflo", email = "*@rootfor.xyz"} +] +requires-python = ">=3.11" +dependencies = [ + "flo-cloud", + "flo-utils", + "db-repo-module", + "tiktoken>=0.9.0", + "textract>=1.6.5", + "transformers>=4.45.0", + "asyncpg>=0.30.0", + "httpx>=0.28.1", + "python-dotenv>=1.1.0,<2.0.0", + "pyjwt[crypto]>=2.9.0", + "torchvision==0.16.0", + "accelerate>=0.33.0,<1.0.0", +] + +[tool.uv.sources] +flo-cloud = { workspace = true } +flo-utils = { workspace = true } +db-repo-module = { workspace = true } + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["rag_ingestion"] diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/__init__.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/constants/__init__.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/constants/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/constants/auth.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/constants/auth.py new file mode 100644 index 00000000..feba2762 --- /dev/null +++ b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/constants/auth.py @@ -0,0 +1,10 @@ +"""Authentication constants.""" + + +class RootfloHeaders: + CLIENT_KEY = 'X-Rootflo-Key' + PASSTHROUGH = 'X-Passthrough' + + +AUTH_ROLE_ID = 'rag_ingestion' +SERVICE_AUTH_ROLE_ID = 'rag-service' diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/embeddings/__init__.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/embeddings/__init__.py new file mode 100644 index 00000000..e69de29b diff --git 
a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/embeddings/embed.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/embeddings/embed.py
new file mode 100644
index 00000000..d6f43103
--- /dev/null
+++ b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/embeddings/embed.py
@@ -0,0 +1,44 @@
+from rag_ingestion.models.knowledge_base_embeddings import KnowledgeBaseEmbeddingObject
+import requests
+from rag_ingestion.env import EMBEDDING_SERVICE_URL
+from flo_utils.utils.log import logger
+
+
+class EmbeddingFunc:
+    def __init__(self):
+        self.max_batch_size = 32
+        self.bgm_url = f'{EMBEDDING_SERVICE_URL}/v1/embeddings'
+        logger.info(f'The embedding url is {EMBEDDING_SERVICE_URL}')
+
+    def generate_document_embeddings(self, chunks):
+        contents = [v['content'] for v in chunks.values()]
+        batches = [
+            contents[i : i + self.max_batch_size]
+            for i in range(0, len(contents), self.max_batch_size)
+        ]
+        embeddings = []
+        for batch in batches:
+            embeddings.extend(self.bgm_embedding_batch(batch))
+        data_list = []
+        for i, (k, v) in enumerate(chunks.items()):
+            data_list.append(
+                KnowledgeBaseEmbeddingObject(
+                    embedding_vector=embeddings[i],
+                    chunk_text=v['content'],
+                    chunk_index=k,
+                )
+            )
+        return data_list, embeddings
+
+    def generate_chunk_embeddings(self, chunks):
+        embeddings = [self.bgm_embedding(chunks)]
+        return embeddings
+
+    def bgm_embedding(self, texts):
+        return self.bgm_embedding_batch(texts)[0]
+
+    def bgm_embedding_batch(self, texts):
+        response = requests.post(
+            self.bgm_url,
+            json={
+                'model': 'BAAI/bge-m3',
+                'input': texts,
+                'encoding_format': 'float',
+            },
+        )
+        return [item['embedding'] for item in response.json()['data']]
diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/embeddings/image_embed.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/embeddings/image_embed.py
new file mode 100644
index 00000000..fd869371
--- /dev/null
+++ b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/embeddings/image_embed.py
@@ -0,0 +1,71 @@
+import torch
+from transformers import CLIPProcessor, CLIPModel, AutoImageProcessor, AutoModel
+from PIL import Image
+import io
+from flo_utils.utils.log import logger
+from rag_ingestion.models.knowledge_base_embeddings import KnowledgeBaseEmbeddingObject
+
+
+class ImageEmbedding:
+    def __init__(self):
+        self.device = 'cuda' if torch.cuda.is_available() else 'cpu'
+        logger.info(f'Initializing models on device: {self.device}')
+
+        # CLIP model, moved to the selected device
+        self.clip_model_name = 'openai/clip-vit-base-patch32'
+        self.model = (
+            CLIPModel.from_pretrained(self.clip_model_name).to(self.device).eval()
+        )
+        self.processor = CLIPProcessor.from_pretrained(self.clip_model_name)
+
+        # DINO model; device_map='auto' handles device placement
+        self.dino_model_name = 'facebook/dinov3-vitl16-pretrain-lvd1689m'
+        self.dino_processor = AutoImageProcessor.from_pretrained(self.dino_model_name)
+        self.dino_model = AutoModel.from_pretrained(
+            self.dino_model_name, device_map='auto', trust_remote_code=True
+        ).eval()
+
+    def embed_image(self, file_content: bytes) -> KnowledgeBaseEmbeddingObject:
+        image = Image.open(io.BytesIO(file_content))
+        if image.mode != 'RGB':
+            image = image.convert('RGB')
+
+        # Move CLIP inputs to the model device
+        inputs = self.processor(images=image, return_tensors='pt').to(self.device)
+
+        # CLIP embedding
+        with torch.no_grad():
+            image_features = self.model.get_image_features(**inputs)
+            image_features = image_features / image_features.norm(dim=-1, keepdim=True)
+            embedding = image_features.squeeze().cpu().numpy().tolist()
+
+        # DINO embedding
+        dino_embedding = self.embed_image_dino(file_content)
+
+        # Pass the DINO embedding to the correct field
+        return KnowledgeBaseEmbeddingObject(
+            embedding_vector=embedding,
+            embedding_vector_1=dino_embedding,
+            chunk_text='image data',
+            chunk_index='chunk_0',
+        )
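+
+    # Example usage (illustrative, assuming `image_bytes` holds a JPEG/PNG):
+    #   obj = ImageEmbedding().embed_image(image_bytes)
+    #   obj.embedding_vector    # CLIP ViT-B/32 image vector
+    #   obj.embedding_vector_1  # DINOv3 ViT-L CLS-token vector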
+
+    @torch.inference_mode()
+    def embed_image_dino(self, file_content: bytes) -> list:
+        image = Image.open(io.BytesIO(file_content))
+        if image.mode != 'RGB':
+            image = image.convert('RGB')
+
+        inputs = self.dino_processor(images=image, return_tensors='pt')
+
+        target_device = self.dino_model.device
+        # Move inputs to the DINO model's device
+        inputs = {k: v.to(target_device) for k, v in inputs.items()}
+
+        outputs = self.dino_model(**inputs)
+
+        image_features = outputs.last_hidden_state[:, 0]
+
+        image_features = image_features / image_features.norm(dim=-1, keepdim=True)
+        embedding = image_features.squeeze().cpu().numpy().tolist()
+
+        return embedding
diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/env.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/env.py
new file mode 100644
index 00000000..415327f4
--- /dev/null
+++ b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/env.py
@@ -0,0 +1,11 @@
+from dotenv import load_dotenv
+import os
+
+load_dotenv()
+
+CLOUD_PROVIDER = os.getenv('CLOUD_PROVIDER', 'gcp')
+RETRY_COUNT = int(os.getenv('RETRY_COUNT', 3))
+EMBEDDING_SERVICE_URL = os.getenv('EMBEDDING_SERVICE_URL')
+FLOWARE_SERVICE_URL = os.getenv('FLOWARE_SERVICE_URL')
+APP_ENV = os.getenv('APP_ENV', 'dev')
+PASSTHROUGH_SECRET = os.getenv('PASSTHROUGH_SECRET')
diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/main.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/main.py
new file mode 100644
index 00000000..50a5931c
--- /dev/null
+++ b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/main.py
@@ -0,0 +1,42 @@
+from rag_ingestion.stream.rag_streamer import RagStreamListener
+from rag_ingestion.processors.kb_storage_processor import KbStorageProcessor
+from db_repo_module.cache.cache_manager import CacheManager
+from flo_cloud.kms import FloKmsService
+from rag_ingestion.env import CLOUD_PROVIDER, RETRY_COUNT
+from flo_cloud.cloud_storage import CloudStorageManager
+from flo_cloud.message_queue import MessageQueueManager
+import os
+
+
+def main():
+    event_manager = MessageQueueManager(CLOUD_PROVIDER)
+    storage_manager = CloudStorageManager(CLOUD_PROVIDER)
+    cache_manager = CacheManager(namespace='rag')
+    encryption_service = None
+    if (CLOUD_PROVIDER == 'aws' and os.getenv('AWS_KMS_ARN') is not None) or (
+        CLOUD_PROVIDER == 'gcp'
+        and os.getenv('GCP_KMS_KEY_RING') is not None
+        and os.getenv('GCP_KMS_CRYPTO_KEY') is not None
+    ):
+        encryption_service = FloKmsService(cloud_provider=CLOUD_PROVIDER)
+
+    # Initialize stream listener
+    listener = RagStreamListener(
+        event_manager=event_manager,
+        processor=KbStorageProcessor(
+            storage_manager,
+            encryption_service,
+        ),
+        cache_manager=cache_manager,
+        retry_count=RETRY_COUNT,
+    )
+
+    listener.run_workers(thread_count=2)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/__init__.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/doc_content.py
b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/doc_content.py
new file mode 100644
index 00000000..b07cd30b
--- /dev/null
+++ b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/doc_content.py
@@ -0,0 +1,10 @@
+from dataclasses import dataclass
+from typing import Union
+
+
+@dataclass
+class DocContent:
+    """Model representing the extracted content from a document file"""
+
+    content: Union[str, bytes]
+    parse_type: str
diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/knowledge_base_embeddings.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/knowledge_base_embeddings.py
new file mode 100644
index 00000000..33fd2a2a
--- /dev/null
+++ b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/knowledge_base_embeddings.py
@@ -0,0 +1,19 @@
+from dataclasses import dataclass, field
+from typing import List, Optional
+
+
+@dataclass
+class KnowledgeBaseEmbeddingObject:
+    embedding_vector: List[float]
+    chunk_text: str
+    chunk_index: str
+    embedding_vector_1: Optional[List[float]] = field(default_factory=list)
+
+
+@dataclass
+class RetrieveParams:
+    kb_id: str
+    threshold: Optional[float] = 0.2
+    top_k: Optional[int] = 5
+    vector_weight: Optional[float] = 0.7
+    keyword_weight: Optional[float] = 0.3
diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/rag_message.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/rag_message.py
new file mode 100644
index 00000000..17f8b5cd
--- /dev/null
+++ b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/models/rag_message.py
@@ -0,0 +1,15 @@
+from flo_utils.streaming.event_message import BaseEventMessage
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass
+class RagEventMessage(BaseEventMessage):
+    """Event message for RAG ingestion events."""
+
+    bucket_name: str
+    bucket_key: str
+    kb_id: Optional[str] = None
+    doc_id: Optional[str] = None
+    parse_type: Optional[str] = None
+    file_type: Optional[str] = None
diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/processors/__init__.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/processors/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/processors/file_processor.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/processors/file_processor.py
new file mode 100644
index 00000000..abada45a
--- /dev/null
+++ b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/processors/file_processor.py
@@ -0,0 +1,48 @@
+import os
+import tempfile
+import textract
+from typing import Union
+
+
+class FileProcessor:
+    def process_file(self, file_content: bytes, file_type: str) -> Union[str, bytes]:
+        mime_type = file_type
+
+        if mime_type.startswith('text/plain'):
+            return file_content.decode('utf-8')
+
+        if mime_type.startswith('image/'):
+            return file_content
+
+        if mime_type.startswith('application/'):
+            try:
+                sub_type = mime_type.split('/')[1]
+            except IndexError:
+                raise ValueError(
+                    f'Unsupported file type: Malformed MIME type "{mime_type}"'
+                )
+
+            # Set delete=False to keep the file until we manually call os.unlink
+            with tempfile.NamedTemporaryFile(
+                mode='w+b', delete=False, suffix=f'.{sub_type}'
+            ) as temp_file:
+                temp_file.write(file_content)
+                temp_file.flush()  # Ensure data is written to disk before processing
+                temp_file_path = temp_file.name
+
+            try:
+                # Process the file using its path
+                text_content = textract.process(
+                    temp_file_path, method='pdfminer'
+                ).decode('utf-8')
+                return text_content
+
+            except Exception as e:
+                # Re-raise processing errors
+                raise RuntimeError(f'Text extraction failed for {mime_type}: {e}')
+
+            finally:
+                os.unlink(temp_file_path)
+
+        else:
+            raise ValueError(f'Unsupported file type: {mime_type}')
diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/processors/kb_storage_processor.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/processors/kb_storage_processor.py
new file mode 100644
index 00000000..ea57f641
--- /dev/null
+++ b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/processors/kb_storage_processor.py
@@ -0,0 +1,148 @@
+from flo_cloud.cloud_storage import CloudStorageManager
+from dataclasses import dataclass
+from typing import List, Union
+from flo_utils.utils.log import logger
+from rag_ingestion.service.kb_rag_storage import KBRagStorage
+from rag_ingestion.embeddings.embed import EmbeddingFunc
+from rag_ingestion.models.doc_content import DocContent
+from rag_ingestion.stream.queue_message import QueueMessage
+from flo_cloud.kms import FloKmsService
+from flo_utils.streaming.message_processor import MessageProcessor, ProcessingResult
+from rag_ingestion.processors.file_processor import FileProcessor
+from rag_ingestion.embeddings.image_embed import ImageEmbedding
+from rag_ingestion.models.knowledge_base_embeddings import KnowledgeBaseEmbeddingObject
+from rag_ingestion.models.rag_message import RagEventMessage
+from rag_ingestion.service.kb_rag_storage import EmbeddingsToStore
+
+
+@dataclass
+class KbStorageInsights:
+    doc_id: str
+    doc_content: DocContent
+    kb_id: str
+    file_type: str
+
+
+class KbStorageProcessor(MessageProcessor):
+    def __init__(
+        self,
+        storage_manager: CloudStorageManager,
+        encryption_service: FloKmsService,
+    ):
+        self.storage_manager = storage_manager
+        self.encryption_service = encryption_service
+        self.kb_rag_storage = KBRagStorage()
+        self.embedding_func = EmbeddingFunc()
+        self.file_processor = FileProcessor()
+        self.image_embedding = ImageEmbedding()
+
+    async def _extract_text_from_message(
+        self, message: QueueMessage, file_content: bytes
+    ) -> DocContent:
+        """
+        Extracts text content from a message based on its parse_type and file_type.
+
+        Args:
+            message: An object with 'parse_type' and 'file_type' attributes.
+            file_content: The binary content of the file.
+
+        Returns:
+            A DocContent object with extracted content and parse_type.
+        """
+        if message.parse_type != 'kb_insertion':
+            return DocContent(content='', parse_type=message.parse_type)
+        content: Union[str, bytes] = self.file_processor.process_file(
+            file_content, str(message.file_type)
+        )
+        return DocContent(content=content, parse_type=message.parse_type)
+
+    def __insert_kb_from_message(
+        self, insights: List[ProcessingResult[KbStorageInsights]]
+    ):
+        """
+        Generates and uploads KB embeddings for each processed document.
+
+        Args:
+            insights: ProcessingResult items whose KbStorageInsights carry
+                the doc_id, extracted doc_content, kb_id and file_type.
+
+        Returns:
+            None
+        """
+        try:
+            embeddings: List[EmbeddingsToStore] = []
+            for kb_insight in insights:
+                kb_id = kb_insight.insights.kb_id
+                doc_id = kb_insight.insights.doc_id
+                file_type = kb_insight.insights.file_type
+
+                logger.info('Starting embedding storage process')
+                if file_type and file_type.lower() in ('pdf', 'application/pdf'):
+                    extracted_docs = [kb_insight.insights.doc_content.content]
+                    docs: List[KnowledgeBaseEmbeddingObject] = (
+                        self.kb_rag_storage.process_document(extracted_docs)
+                    )
+                else:
+                    image_items = [kb_insight.insights.doc_content.content]
+                    docs: List[KnowledgeBaseEmbeddingObject] = [
+                        self.image_embedding.embed_image(image_item)
+                        for image_item in image_items
+                    ]
+
+                embeddings.append(
+                    EmbeddingsToStore(
+                        kb_embeddings=docs,
+                        doc_id=doc_id,
+                        kb_id=kb_id,
+                        file_type=file_type,
+                    )
+                )
+
+            self.kb_rag_storage.upload_embedding_with_retry(embeddings=embeddings)
+            logger.info('Embeddings stored in the database')
+        except Exception as err:
+            logger.error(f'Failed to store KB embeddings: {err}')
+
+    async def process(
+        self, message: RagEventMessage
+    ) -> ProcessingResult[KbStorageInsights]:
+        """
+        Main public interface for processing messages and generating embeddings.
+
+        Args:
+            message: Queue message containing document information
+
+        Returns:
+            ProcessingResult indicating success/failure and any insights
+        """
+        logger.info(f'Processing message: {message.id}')
+        logger.info(f'Processing file: {message.bucket_name}/{message.bucket_key}')
+
+        file_content_encrypt = self.storage_manager.read_file(
+            message.bucket_name, message.bucket_key
+        )
+        file_content = (
+            self.encryption_service.decrypt(file_content_encrypt)
+            if self.encryption_service
+            else file_content_encrypt
+        )
+        doc_content = await self._extract_text_from_message(message, file_content)
+        return ProcessingResult[KbStorageInsights](
+            success=True,
+            insights=KbStorageInsights(
+                doc_id=message.doc_id,
+                doc_content=doc_content,
+                kb_id=message.kb_id,
+                file_type=message.file_type,
+            ),
+        )
+
+    def store(self, insights: List[ProcessingResult[KbStorageInsights]]):
+        if not insights:
+            return False
+        try:
+            self.__insert_kb_from_message(insights)
+            return True
+        except Exception as e:
+            logger.error(f'Failed to store data to the database: {e}')
+            return False
diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/service/__init__.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/service/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/service/kb_rag_storage.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/service/kb_rag_storage.py
new file mode 100644
index 00000000..b408e531
--- /dev/null
+++ b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/service/kb_rag_storage.py
@@ -0,0 +1,431 @@
+from typing import Any, List, Dict, Tuple, Optional
+import logging
+import numpy as np
+import tiktoken
+from datetime import datetime
+import uuid
+from rag_ingestion.embeddings.embed import EmbeddingFunc
+import httpx
+import time
+from dataclasses import dataclass
+from rag_ingestion.env import FLOWARE_SERVICE_URL, APP_ENV, PASSTHROUGH_SECRET
+from rag_ingestion.constants.auth import RootfloHeaders
+from rag_ingestion.models.knowledge_base_embeddings import (
+    KnowledgeBaseEmbeddingObject,
+    RetrieveParams,
+)
+from flo_utils.utils.log import logger
+import ast
+
+
+@dataclass
+class EmbeddingsToStore:
+    kb_embeddings: List[KnowledgeBaseEmbeddingObject]
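+    # One KnowledgeBaseEmbeddingObject per chunk of the source document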
+    doc_id: str
+    kb_id: str
+    file_type: str
+
+
+class KBRagStorage:
+    """Chunking, embedding and retrieval service for knowledge-base RAG."""
+
+    def __init__(self):
+        self.llm_model_name = 'flora-q8'
+        self.embedding_model = 'mxbai-embed-large'
+        self.embedding_dim = 1024
+        self.max_token_size = 8500
+        self.tiktoken_model = 'gpt-4o'
+        self.chunk_size = 1200
+        self.chunk_overlap = 128
+        self.embedding = EmbeddingFunc()
+        self.logger = logging.getLogger(__name__)
+        self.app_env = APP_ENV
+        self.passthrough_secret = PASSTHROUGH_SECRET
+
+    def _fetch_headers(self) -> dict:
+        """
+        Fetch headers for HTTP requests to floware service.
+        Adds passthrough authentication header for non-production environments.
+
+        Returns:
+            dict: Headers to include in HTTP requests
+        """
+        headers = {'Content-Type': 'application/json'}
+
+        # Add passthrough header for non-production environments
+        if self.app_env != 'production' and self.passthrough_secret:
+            headers[RootfloHeaders.PASSTHROUGH] = self.passthrough_secret
+
+        return headers
+
+    def __encode_string_by_tiktoken(self, content: str, model_name: str = 'gpt-4o'):
+        encoder = tiktoken.encoding_for_model(model_name)
+        tokens = encoder.encode(content)
+        return tokens
+
+    def __decode_tokens_by_tiktoken(
+        self, tokens: list[int], model_name: str = 'gpt-4o'
+    ):
+        decoder = tiktoken.encoding_for_model(model_name)
+        content = decoder.decode(tokens)
+        return content
+
+    def __clean_text(self, content: str) -> str:
+        """
+        Clean and normalize text content from documents.
+
+        Args:
+            content: The raw text content to clean
+
+        Returns:
+            Cleaned and normalized text
+        """
+        if not content or not isinstance(content, str):
+            return ''
+
+        # Basic cleaning
+        content = content.replace('\x00', '')  # Remove null bytes
+        return content
+
+    def __extract_documents(self, contents: List[str]) -> Dict[str, Dict[str, Any]]:
+        """
+        Extract content from files with improved error handling.
+
+        Args:
+            contents: List of text contents to process
+
+        Returns:
+            Mapping of doc_id to document metadata and cleaned content
+        """
+        if not contents:
+            self.logger.warning('No contents provided for extraction')
+            return {}
+
+        # Process contents
+        results = []
+        for content in contents:
+            processed_content = content
+            if processed_content:
+                results.append(processed_content)
+
+        if not results:
+            return {}
+
+        # Clean and process results
+        cleaned_content = [(self.__clean_text(content)) for content in results]
+
+        # Create document structure
+        docs = {
+            f'doc_{index}': {
+                'content': content,
+                'content_length': len(content),
+                'created_at': datetime.now().isoformat(),
+                'updated_at': datetime.now().isoformat(),
+            }
+            for index, (content) in enumerate(cleaned_content)
+        }
+
+        return docs
+
+    def __chunk_with_langchain_recursive(
+        self,
+        content: str,
+        tiktoken_model: str,
+        chunk_size: int,
+        chunk_overlap: int,
+        separators: List[str] = ['\n\n', '\n', ' ', ''],
+    ) -> List[Dict[str, Any]]:
+        """
+        Chunk content recursively using LangChain-style separators.
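+        Delegates to a custom recursive splitter (below); illustrative
+        defaults: chunk_size=1200 and chunk_overlap=128 yield roughly
+        1200-character chunks overlapping by 128 characters.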
+ + Args: + content: The text content to chunk + tiktoken_model: The tiktoken model to use + chunk_size: Approximate chunk size in characters + chunk_overlap: Character overlap between chunks + separators: List of separators for recursive splitting + + Returns: + List of chunks with token counts and content + """ + try: + return self.__chunk_with_custom_splitter( + content, + self.max_token_size, + self.chunk_overlap, + tiktoken_model, + chunk_size, + chunk_overlap, + separators, + ) + except Exception as e: + self.logger.error( + f'Error using LangChain RecursiveCharacterTextSplitter: {e}' + ) + return self.__fallback_chunking( + content, self.max_token_size, self.chunk_overlap, tiktoken_model + ) + + def __chunk_with_custom_splitter( + self, + content: str, + max_token_size: int, + overlap_token_size: int, + tiktoken_model: str, + chunk_size: int, + chunk_overlap: int, + separators: List[str], + ) -> List[Dict[str, Any]]: + """Handle chunking using custom recursive text splitter.""" + results = [] + + # Default separators if none provided + if not separators: + separators = ['\n\n', '\n', ' ', ''] + + def recursive_split(text: str, seps: List[str]) -> List[str]: + if not seps or len(text) <= chunk_size: + return [text] if text.strip() else [] + + sep = seps[0] + splits = text.split(sep) if sep else list(text) + + # Keep separator with text + if sep: + splits = [splits[0]] + [sep + s for s in splits[1:] if s] + + chunks = [] + current = '' + + for split in splits: + if len(split) > chunk_size: + # Add current chunk if exists + if current: + chunks.append(current) + # Add overlap + if chunk_overlap > 0: + current = ( + current[-chunk_overlap:] + if len(current) > chunk_overlap + else '' + ) + else: + current = '' + + # Recursively split large piece + chunks.extend(recursive_split(split, seps[1:])) + + elif len(current) + len(split) <= chunk_size: + current += split + else: + # Start new chunk + if current: + chunks.append(current) + # Add overlap + if chunk_overlap > 0 and len(current) > chunk_overlap: + current = current[-chunk_overlap:] + split + else: + current = split + else: + current = split + + if current: + chunks.append(current) + + return [c for c in chunks if c.strip()] + + # Split content into chunks + chunks = recursive_split(content, separators) + # Process each chunk + for chunk_index, chunk_text in enumerate(chunks): + tokens = self.__encode_string_by_tiktoken(chunk_text) + + if len(tokens) > max_token_size: + results.extend( + self.__split_large_chunk( + tokens, max_token_size, overlap_token_size, tiktoken_model + ) + ) + else: + results.append( + { + 'tokens': len(tokens), + 'content': chunk_text.strip(), + 'chunk_order_index': len(results), + 'chunk_index': chunk_index, + 'metadata': {'start_index': content.find(chunk_text)}, + } + ) + + return results + + def __split_large_chunk( + self, tokens: List[int], max_tokens: int, overlap: int, model: str + ) -> List[Dict[str, Any]]: + """Split a large chunk into smaller pieces.""" + results = [] + for start in range(0, len(tokens), max_tokens - overlap): + end = min(start + max_tokens, len(tokens)) + chunk_content = self.__decode_tokens_by_tiktoken( + tokens[start:end], + model_name=model, + ) + results.append( + { + 'tokens': end - start, + 'content': chunk_content.strip(), + 'chunk_order_index': len(results), + } + ) + return results + + def __fallback_chunking( + self, content: str, max_tokens: int, overlap: int, model: str + ) -> List[Dict[str, Any]]: + """Fallback chunking method when LangChain fails.""" + results = [] + 
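+        # Token-window fallback: step through the token stream in windows of
+        # max_tokens, sharing `overlap` tokens between consecutive windows
+        # (illustrative: max_tokens=8500, overlap=128 advances 8372 tokens
+        # per chunk).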
tokens = self.__encode_string_by_tiktoken(content) + for index, start in enumerate(range(0, len(tokens), max_tokens - overlap)): + end = min(start + max_tokens, len(tokens)) + chunk_content = self.__decode_tokens_by_tiktoken( + tokens[start:end], model_name=model + ) + results.append( + { + 'tokens': end - start, + 'content': chunk_content.strip(), + 'chunk_order_index': index, + } + ) + return results + + def process_document( + self, content: List[str] + ) -> List[KnowledgeBaseEmbeddingObject]: + """ + Process documents and generate embeddings. + + Args: + content: List of text contents to process + + Returns: + List of processed documents with embeddings + """ + all_docs = self.__extract_documents(content) + processed_docs = [] + for doc_id, doc_content in all_docs.items(): + chunks = { + f'chunk_{ind}': { + **data, + 'full_doc_id': doc_id, + 'file_path': getattr(doc_content, 'file_path', 'unknown_source'), + } + for ind, data in enumerate( + self.__chunk_with_langchain_recursive( + doc_content['content'], + self.tiktoken_model, + self.chunk_size, + self.chunk_overlap, + ) + ) + } + + data_list, _ = self.embedding.generate_document_embeddings(chunks) + processed_docs.extend(data_list) + + return processed_docs + + def retrieve_documents( + self, + query: str, + kb_id: uuid.UUID, + threshold: Optional[float] = None, + top_k: Optional[int] = None, + vector_weight: Optional[float] = None, + keyword_weight: Optional[float] = None, + ) -> list: + """ + Retrieve documents for a specific knowledge base + Args: + query: Text query for search + kb_id: Knowledge base ID to filter results + threshold: Cosine similarity threshold (default: 0.2) + top_k: Number of results to return (default: 5) + vector_weight: Weight for vector similarity score (default: 0.7) + keyword_weight: Weight for keyword similarity score (default: 0.3) + Returns: + List of retrieved documents + """ + if not isinstance(query, str): + raise ValueError('Query must be in string format') + + query_embeddings = self.embedding.generate_chunk_embeddings([query]) + query_embeddings = np.array(query_embeddings, dtype=np.float16).tolist() + query_embeddings = ast.literal_eval(','.join(map(str, query_embeddings[0]))) + + params = RetrieveParams( + kb_id=str(kb_id), + threshold=threshold, + top_k=top_k, + vector_weight=vector_weight, + keyword_weight=keyword_weight, + ) + reranked_docs = self.retrieve_docs_with_retry(query, query_embeddings, params) + return reranked_docs + + def upload_embedding_with_retry( + self, + embeddings: List[EmbeddingsToStore], + max_retries=3, + initial_delay=1.0, + ): + """ + Upload a single embedding with exponential backoff retry logic. 
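+
+        Despite the singular name, this builds one batched payload per
+        document (vectors, chunk texts and chunk indices together) and posts
+        them to floware in a single request; illustrative payload shape:
+            {'embedding_vector': [...], 'embedding_vector_1': [...],
+             'document_id': ..., 'kb_id': ..., 'chunk_text': [...],
+             'chunk_index': [...]}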
+ """ + doc_wise_embeddings = [] + for embedding_obj in embeddings: + data = embedding_obj.kb_embeddings + payload = { + 'embedding_vector': [ + embedding_obj.embedding_vector for embedding_obj in data + ], + 'embedding_vector_1': [ + embedding_obj.embedding_vector_1 for embedding_obj in data + ], + 'document_id': embedding_obj.doc_id, + 'kb_id': embedding_obj.kb_id, + 'chunk_text': [embedding_obj.chunk_text for embedding_obj in data], + 'chunk_index': [embedding_obj.chunk_index for embedding_obj in data], + } + doc_wise_embeddings.append(payload) + return self._upload_doc_wise_embeddings( + doc_wise_embeddings, max_retries, initial_delay + ) + + def _upload_doc_wise_embeddings( + self, + doc_wise_embeddings: List[Dict[str, Any]], + max_retries=3, + initial_delay=1.0, + ): + url = f'{FLOWARE_SERVICE_URL}/floware/v1/store_embedding' + delay = initial_delay + for attempt in range(max_retries): + try: + response = httpx.post( + url, + json={'embeddings': doc_wise_embeddings}, + headers=self._fetch_headers(), + ) + if response.status_code == 200: + return response + else: + logger.info(f'The error request was {response.text}') + except Exception as e: + logger.error(f'The error while uploading doc wise embeddings was {e}') + if attempt < max_retries - 1: + time.sleep(delay) + delay *= 1.5 + raise Exception('Failed to upload doc wise embeddings after max retries') diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/stream/__init__.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/stream/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/stream/queue_message.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/stream/queue_message.py new file mode 100644 index 00000000..13b61dea --- /dev/null +++ b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/stream/queue_message.py @@ -0,0 +1,42 @@ +from dataclasses import dataclass +from typing import Optional, Dict, Any + + +@dataclass +class QueueMessage: + message_id: str + message_reciept_id: str + bucket_name: str + key: str + worker_id: Optional[str] = None + parse_type: Optional[str] = None + image_url: Optional[str] = None + metadata: Optional[Dict[str, Any]] = None + kb_id: Optional[str] = None + doc_id: Optional[str] = None + file_type: Optional[str] = None + + +@dataclass +class EventMessage: + id: str + + ack_id: str + + # should be a json set from + body: dict + + # parse type, either lambda or buckets + parse_type: str + + # for bucket cases + bucket_name: Optional[str] = None + + # for file in buckets + bucket_key: Optional[str] = None + + # kb_id for knowledge base + kb_id: Optional[str] = None + + # document_id for knowledge base + doc_id: Optional[str] = None diff --git a/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/stream/rag_streamer.py b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/stream/rag_streamer.py new file mode 100644 index 00000000..8557b16c --- /dev/null +++ b/wavefront/server/background_jobs/rag_ingestion/rag_ingestion/stream/rag_streamer.py @@ -0,0 +1,38 @@ +from flo_utils.streaming.stream_listner import StreamListener +from flo_cloud._types import MessageQueueDict +from flo_utils.streaming.event_message import BaseEventMessage +from typing import List + +from rag_ingestion.models.rag_message import RagEventMessage + + +class RagStreamListener(StreamListener): + def get_event_messages( + self, messages: List[MessageQueueDict] + ) -> 
List[BaseEventMessage]: + return [self.__make_event_message(msg) for msg in messages] + + def __make_event_message(self, message: MessageQueueDict) -> RagEventMessage: + bucket_name, bucket_key, kb_id, doc_id, parse_type, file_type = ( + self.__fetch_bucket_details(message.body) + ) + return RagEventMessage( + id=message.id, + ack_id=message.ack_id, + bucket_name=bucket_name, + bucket_key=bucket_key, + kb_id=kb_id, + doc_id=doc_id, + parse_type=parse_type, + file_type=file_type, + body=message.body, + ) + + def __fetch_bucket_details(self, body: dict): + bucket_name = body['bucket'] if 'bucket' in body else body['bucket_name'] + bucket_key = body['name'] if 'name' in body else body['key'] + kb_id = body['kb_id'] if 'kb_id' in body else None + doc_id = body['doc_id'] if 'doc_id' in body else None + parse_type = body['parse_type'] if 'parse_type' in body else None + file_type = body['file_type'] if 'file_type' in body else None + return bucket_name, bucket_key, kb_id, doc_id, parse_type, file_type diff --git a/wavefront/server/background_jobs/workflow_job/pyproject.toml b/wavefront/server/background_jobs/workflow_job/pyproject.toml new file mode 100644 index 00000000..4ac51a2a --- /dev/null +++ b/wavefront/server/background_jobs/workflow_job/pyproject.toml @@ -0,0 +1,28 @@ +[project] +name = "workflow-job" +version = "0.0.1" +description = "AI Insights from Voice" +authors = [ + {name = "rootflo", email = "*@rootfor.xyz"} +] +requires-python = ">=3.11" +dependencies = [ + "agents-module", + "flo-cloud", + "flo-utils", + "common-module", + "python-dotenv>=1.1.0,<2.0.0", +] + +[tool.uv.sources] +agents-module = { workspace = true} +flo-cloud = { workspace = true } +flo-utils = { workspace = true } +common-module = { workspace = true } + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["workflow_job"] diff --git a/wavefront/server/background_jobs/workflow_job/workflow_job/__init__.py b/wavefront/server/background_jobs/workflow_job/workflow_job/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/background_jobs/workflow_job/workflow_job/config.ini b/wavefront/server/background_jobs/workflow_job/workflow_job/config.ini new file mode 100644 index 00000000..cfb4301e --- /dev/null +++ b/wavefront/server/background_jobs/workflow_job/workflow_job/config.ini @@ -0,0 +1,26 @@ +[app_config] +thread_count = ${THREAD_COUNT:3} +app_env = ${APP_ENV:dev} +passthrough_secret = ${PASSTHROUGH_SECRET} + +[agents] +agent_yaml_bucket = ${AGENT_YAML_BUCKET} + +[cloud_config] +cloud_provider = ${CLOUD_PROVIDER} + +[gcp_config] +project_id = ${GCP_PROJECT_ID} +pubsub_subscription_id = ${GCP_PUBSUB_SUBSCRIPTION_ID} +pubsub_topic_id = ${GCP_PUBSUB_TOPIC_ID} +bucket = ${APPLICATION_BUCKET} + +[floware] +service_url = ${FLOWARE_SERVICE_URL} + +[database] +username = ${DB_USERNAME} +password = ${DB_PASSWORD} +host = ${DB_HOST} +port = ${DB_PORT} +db_name = ${DB_NAME} diff --git a/wavefront/server/background_jobs/workflow_job/workflow_job/constants/__init__.py b/wavefront/server/background_jobs/workflow_job/workflow_job/constants/__init__.py new file mode 100644 index 00000000..4e3ca649 --- /dev/null +++ b/wavefront/server/background_jobs/workflow_job/workflow_job/constants/__init__.py @@ -0,0 +1 @@ +# Empty file for package diff --git a/wavefront/server/background_jobs/workflow_job/workflow_job/constants/auth.py b/wavefront/server/background_jobs/workflow_job/workflow_job/constants/auth.py new file mode 100644 
index 00000000..f623278c --- /dev/null +++ b/wavefront/server/background_jobs/workflow_job/workflow_job/constants/auth.py @@ -0,0 +1,10 @@ +"""Authentication constants.""" + + +class RootfloHeaders: + CLIENT_KEY = 'X-Rootflo-Key' + PASSTHROUGH = 'X-Passthrough' + + +AUTH_ROLE_ID = 'workflow_job' +SERVICE_AUTH_ROLE_ID = 'workflow-service' diff --git a/wavefront/server/background_jobs/workflow_job/workflow_job/main.py b/wavefront/server/background_jobs/workflow_job/workflow_job/main.py new file mode 100644 index 00000000..70a1316f --- /dev/null +++ b/wavefront/server/background_jobs/workflow_job/workflow_job/main.py @@ -0,0 +1,103 @@ +from dotenv import load_dotenv + +# ruff: noqa: E402 +load_dotenv() + +from flo_cloud.message_queue import MessageQueueManager +from db_repo_module.cache.cache_manager import CacheManager +from workflow_job.workflow_listener import WorkflowListener +from workflow_job.workflow_processor import WorkflowMessageProcessor +from flo_cloud.cloud_storage import CloudStorageManager + +from api_services_module.api_services_container import create_api_services_container +from db_repo_module.db_repo_container import DatabaseModuleContainer +from common_module.common_container import CommonContainer +from api_services_module.api_services_container import ApiServicesContainer +from agents_module.agents_container import AgentsContainer +from tools_module.tools_container import ToolsContainer +from plugins_module.plugins_container import PluginsContainer + + +db_repo_container = DatabaseModuleContainer() +common_container = CommonContainer(cache_manager=db_repo_container.cache_manager) +config = common_container.config() + +# API Services Container +api_services_container: ApiServicesContainer = create_api_services_container( + api_service_repository=db_repo_container.api_services_repository, + cloud_storage_manager=common_container.cloud_storage_manager, + db_client=db_repo_container.db_client, + cache_manager=db_repo_container.cache_manager, + response_formatter=common_container.response_formatter, +) + +tools_container = ToolsContainer() + +agents_container = AgentsContainer( + db_client=db_repo_container.db_client, + cloud_storage_manager=common_container.cloud_storage_manager, + cache_manager=db_repo_container.cache_manager, + tool_loader=tools_container.tool_loader, + workflow_pipeline_repository=db_repo_container.workflow_pipeline_repository, + workflow_runs_repository=db_repo_container.workflow_runs_repository, + namespace_repository=db_repo_container.namespace_repository, + agent_repository=db_repo_container.agent_repository, + workflow_repository=db_repo_container.workflow_repository, +) + +plugins_container = PluginsContainer( + db_client=db_repo_container.db_client, + cloud_manager=common_container.cloud_storage_manager, + dynamic_query_repository=db_repo_container.dynamic_query_repository, + cache_manager=db_repo_container.cache_manager, +) + +plugins_container.wire( + modules=[__name__], + packages=[ + 'plugins_module.controllers', + 'plugins_module.services', + 'user_management_module.controllers', + 'user_management_module.authorization', + 'tools_module.datasources', + ], +) + +api_services_container.wire( + modules=[__name__], + packages=[ + 'api_services_module.execution', + ], +) + + +def main(): + message_queue_manager = MessageQueueManager( + config['cloud_config']['cloud_provider'] + ) + cloud_storage_manager = CloudStorageManager( + config['cloud_config']['cloud_provider'] + ) + cache_manager = CacheManager(namespace='workflow-worker') + + # Initialize 
+    listener = WorkflowListener(
+        event_manager=message_queue_manager,
+        processor=WorkflowMessageProcessor(
+            cloud_storage_manager=cloud_storage_manager,
+            cache_manager=cache_manager,
+            workflow_inference_service=agents_container.workflow_inference_service(),
+            floware_service_url=config['floware']['service_url'],
+            app_env=config['app_config']['app_env'],
+            passthrough_secret=config['app_config']['passthrough_secret'],
+        ),
+        cache_manager=cache_manager,
+        retry_count=3,
+        streaming_batch_size=5,
+    )
+
+    listener.run_workers(thread_count=int(config['app_config']['thread_count']))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/wavefront/server/background_jobs/workflow_job/workflow_job/models.py b/wavefront/server/background_jobs/workflow_job/workflow_job/models.py
new file mode 100644
index 00000000..93fcbab1
--- /dev/null
+++ b/wavefront/server/background_jobs/workflow_job/workflow_job/models.py
@@ -0,0 +1,7 @@
+from flo_utils.streaming.event_message import BaseEventMessage
+from dataclasses import dataclass
+
+
+@dataclass
+class WorkflowEventMessage(BaseEventMessage):
+    body: dict
diff --git a/wavefront/server/background_jobs/workflow_job/workflow_job/workflow_listener.py b/wavefront/server/background_jobs/workflow_job/workflow_job/workflow_listener.py
new file mode 100644
index 00000000..e13a0e59
--- /dev/null
+++ b/wavefront/server/background_jobs/workflow_job/workflow_job/workflow_listener.py
@@ -0,0 +1,19 @@
+from flo_utils.streaming.stream_listner import StreamListener
+from flo_cloud._types import MessageQueueDict
+from flo_utils.streaming.event_message import BaseEventMessage
+from typing import List
+from .models import WorkflowEventMessage
+
+
+class WorkflowListener(StreamListener):
+    def get_event_messages(
+        self, messages: List[MessageQueueDict]
+    ) -> List[BaseEventMessage]:
+        return [self.__make_event_message(msg) for msg in messages]
+
+    def __make_event_message(self, message: MessageQueueDict) -> WorkflowEventMessage:
+        return WorkflowEventMessage(
+            id=message.id,
+            ack_id=message.ack_id,
+            body=message.body,
+        )
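For orientation, the body each WorkflowEventMessage carries is the same payload that submit_workflow_to_pipeline (in workflow_pipeline_controller.py, later in this patch) places on the queue. A minimal sketch of that shape; the values here are placeholders, not real data:

    # Illustrative message body, mirroring the keys built at submit time.
    message_body = {
        'workflow_run_id': 'run-uuid',            # id of the WorkflowRuns row created at submit
        'workflow_pipeline_id': 'pipeline-uuid',  # pipeline that received the job
        'pipeline_job': {
            'inputs': ['Process the following text: ...'],
            'variables': {'target_language': 'Spanish', 'tone': 'formal'},
        },
        # Snapshot of workflow.to_dict(); truncated to two fields for illustration
        'workflow_data': {'name': 'my-workflow', 'namespace': 'default'},
    }

diff --git a/wavefront/server/background_jobs/workflow_job/workflow_job/workflow_processor.py b/wavefront/server/background_jobs/workflow_job/workflow_job/workflow_processor.py
new file mode 100644
index 00000000..000eca36
--- /dev/null
+++ b/wavefront/server/background_jobs/workflow_job/workflow_job/workflow_processor.py
@@ -0,0 +1,181 @@
+from __future__ import annotations
+
+from datetime import datetime
+from typing import Any, Dict, List, cast
+
+import requests
+from workflow_job.constants.auth import RootfloHeaders
+from agents_module.services.workflow_inference_service import WorkflowInferenceService
+from agents_module.utils.input_processing_utils import process_inference_inputs
+from db_repo_module.cache.cache_manager import CacheManager
+from flo_ai import BaseMessage
+from flo_cloud.cloud_storage import CloudStorageManager
+from flo_utils.streaming.event_message import BaseEventMessage
+from flo_utils.streaming.message_processor import MessageProcessor, ProcessingResult
+from flo_utils.constants.workflow import WorkflowStatus
+from common_module.log.logger import logger
+
+from .models import WorkflowEventMessage
+
+
+class WorkflowMessageProcessor(MessageProcessor[ProcessingResult[Dict[str, Any]]]):
+    def __init__(
+        self,
+        cloud_storage_manager: CloudStorageManager,
+        cache_manager: CacheManager,
+        workflow_inference_service: WorkflowInferenceService,
+        floware_service_url: str,
+        app_env: str = 'production',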
+        passthrough_secret: str | None = None,
+    ):
+        self.cloud_storage_manager = cloud_storage_manager
+        self.cache_manager = cache_manager
+        self.workflow_inference_service = workflow_inference_service
+        self.floware_service_url = floware_service_url
+        self.app_env = app_env
+        self.passthrough_secret = passthrough_secret
+
+    def _fetch_headers(self) -> dict[str, str]:
+        """
+        Fetch headers for HTTP requests to the floware service.
+        Adds the passthrough authentication header for non-production environments.
+
+        Returns:
+            dict: Headers to include in HTTP requests
+        """
+        headers: dict[str, str] = {'Content-Type': 'application/json'}
+
+        # Add passthrough header for non-production environments
+        if self.app_env != 'production' and self.passthrough_secret:
+            headers[RootfloHeaders.PASSTHROUGH] = self.passthrough_secret
+
+        return headers
+
+    async def process(self, message: BaseEventMessage) -> ProcessingResult:
+        workflow_message = cast(WorkflowEventMessage, message)
+        workflow_run_id = workflow_message.body['workflow_run_id']
+        req_payload = {
+            'workflow_run_id': workflow_run_id,
+            'status': WorkflowStatus.IN_PROGRESS,
+            'start_time': datetime.now().isoformat(),
+        }
+
+        response = self.__update_workflow_run_state(req_payload, workflow_run_id)
+        if response.status_code != 200:
+            logger.error(f'Failed to update workflow run state: {response.json()}')
+
+        pipeline_job = workflow_message.body['pipeline_job']
+        workflow_data = workflow_message.body['workflow_data']
+
+        resolved_inputs = process_inference_inputs(pipeline_job['inputs'])
+        inference_inputs: List[BaseMessage]
+        if isinstance(resolved_inputs, list):
+            inference_inputs = [cast(BaseMessage, item) for item in resolved_inputs]
+        else:
+            inference_inputs = [resolved_inputs]
+        variables = pipeline_job['variables'] if pipeline_job['variables'] else {}
+
+        (
+            result,
+            execution_time,
+        ) = await self.workflow_inference_service.perform_inference_v2(
+            workflow_data=workflow_data,
+            variables=variables,
+            inputs=inference_inputs,
+            output_json_enabled=False,
+        )
+
+        return ProcessingResult(
+            success=True,
+            insights={
+                'result': result,
+                'execution_time': execution_time,
+                'workflow_run_id': workflow_run_id,
+            },
+        )
+
+    def store(
+        self,
+        insights: List[ProcessingResult[Dict[str, Any]]],
+        is_failed: bool = False,
+    ) -> bool:
+        if not insights:
+            logger.info('No insights to store, returning False')
+            return False
+
+        for result in insights:
+            insights_payload = result.insights
+            if not isinstance(insights_payload, dict):
+                logger.error(
+                    f'Insights payload is not a dictionary for result: {result}'
+                )
+                return False
+            workflow_run_id: str | None = insights_payload.get('workflow_run_id')
+            if not workflow_run_id:
+                logger.error(f'Workflow run ID not found for result: {result}')
+                return False
+
+            req_payload: dict[str, Any] = {
+                'workflow_run_id': workflow_run_id,
+                'end_time': datetime.now().isoformat(),
+            }
+            if result.success:
+                req_payload['status'] = WorkflowStatus.COMPLETED
+                req_payload['output'] = insights_payload.get('result')
+            elif is_failed:
+                req_payload['status'] = WorkflowStatus.FAILED
+                req_payload['error'] = result.error or 'Unknown error'
+
+            response: requests.Response = self.__update_workflow_run_state(
+                req_payload, workflow_run_id
+            )
+            if response.status_code != 200:
+                logger.error(
+                    f'Failed to update workflow run state for workflow run {workflow_run_id}: {response.json()}'
+                )
+                return False
+
+        return True
+
+    def __update_workflow_run_state(
+        self, payload: dict[str, Any], workflow_run_id: str
+    ) -> requests.Response:
+        logger.info(f'Updating workflow run state for workflow run {workflow_run_id}')
+        req_headers = self._fetch_headers()
+
+        return requests.put(
+            url=f'{self.floware_service_url}/v1/workflow-runs/{workflow_run_id}',
+            headers=req_headers,
+            json=payload,
+        )
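The processor reports run state to the floware service over plain HTTP rather than writing through the repository layer. A minimal sketch of the equivalent request (illustrative host and run id; the route is the PUT /v1/workflow-runs/{workflow_run_id} endpoint defined later in this patch):

    import requests

    # Illustrative stand-ins: 'http://floware:8000' for floware_service_url,
    # 'run-uuid' for a real workflow run id. The status string must be an
    # actual WorkflowStatus value; 'COMPLETED' is assumed here.
    resp = requests.put(
        'http://floware:8000/v1/workflow-runs/run-uuid',
        headers={'Content-Type': 'application/json'},
        json={'status': 'COMPLETED', 'end_time': '2025-12-04T13:46:38'},
    )
    assert resp.status_code == 200  # a non-200 is logged and store() returns False

diff --git a/wavefront/server/docker-compose.yml b/wavefront/server/docker-compose.yml
new file mode 100644
index 00000000..a0b78199
--- /dev/null
+++ b/wavefront/server/docker-compose.yml
@@ -0,0 +1,34 @@
+version: '3.8'
+
+services:
+  postgres:
+    image: ankane/pgvector
+    container_name: postgres
+    restart: always
+    environment:
+      - POSTGRES_USER=postgres
+      - POSTGRES_PASSWORD=postgres
+      - POSTGRES_DB=floware
+    ports:
+      - '5432:5432'
+    volumes:
+      - /var/lib/postgres-data:/var/lib/postgresql/data
+      - /var/lib/init.sql:/docker-entrypoint-initdb.d/init.sql
+    networks:
+      - floware-network
+
+  redis:
+    image: redis:7.4
+    container_name: redis
+    ports:
+      - "6379:6379"
+    networks:
+      - floware-network
+
+volumes:
+  db:
+    driver: local
+
+networks:
+  floware-network:
+    driver: bridge
diff --git a/wavefront/server/modules/agents_module/agents_module/agents_container.py b/wavefront/server/modules/agents_module/agents_module/agents_container.py
new file mode 100644
index 00000000..eccfce3c
--- /dev/null
+++ b/wavefront/server/modules/agents_module/agents_module/agents_container.py
@@ -0,0 +1,77 @@
+from dependency_injector import containers
+from dependency_injector import providers
+from agents_module.services.agent_inference_service import AgentInferenceService
+from agents_module.services.agent_crud_service import AgentCrudService
+from agents_module.services.namespace_service import NamespaceService
+from agents_module.services.workflow_crud_service import WorkflowCrudService
+from agents_module.services.workflow_inference_service import WorkflowInferenceService
+from flo_cloud.message_queue import MessageQueueManager
+
+
+class AgentsContainer(containers.DeclarativeContainer):
+    config = providers.Configuration(ini_files=['config.ini'])
+
+    db_client = providers.Dependency()
+
+    cloud_storage_manager = providers.Dependency()
+
+    cache_manager = providers.Dependency()
+
+    tool_loader = providers.Dependency()
+
+    workflow_pipeline_repository = providers.Dependency()
+    workflow_runs_repository = providers.Dependency()
+
+    namespace_repository = providers.Dependency()
+
+    agent_repository = providers.Dependency()
+
+    workflow_repository = providers.Dependency()
+
+    namespace_service = providers.Singleton(
+        NamespaceService,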
namespace_repository=namespace_repository, + cache_manager=cache_manager, + ) + + agent_crud_service = providers.Singleton( + AgentCrudService, + agent_repository=agent_repository, + namespace_service=namespace_service, + cloud_storage_manager=cloud_storage_manager, + cache_manager=cache_manager, + bucket_name=config.agents.agent_yaml_bucket, + ) + + # Agent inference service + agent_inference_service = providers.Singleton( + AgentInferenceService, + cache_manager=cache_manager, + tool_loader=tool_loader, + agent_crud_service=agent_crud_service, + ) + + workflow_crud_service = providers.Singleton( + WorkflowCrudService, + workflow_repository=workflow_repository, + namespace_service=namespace_service, + cloud_storage_manager=cloud_storage_manager, + cache_manager=cache_manager, + bucket_name=config.agents.agent_yaml_bucket, + agent_crud_service=agent_crud_service, + tool_loader=tool_loader, + ) + + workflow_inference_service = providers.Singleton( + WorkflowInferenceService, + cloud_storage_manager=cloud_storage_manager, + cache_manager=cache_manager, + bucket_name=config.agents.agent_yaml_bucket, + agent_crud_service=agent_crud_service, + tool_loader=tool_loader, + ) + + message_queue_manager = providers.Singleton( + MessageQueueManager, + cloud_provider=config.cloud_config.cloud_provider, + ) diff --git a/wavefront/server/modules/agents_module/agents_module/controllers/agent_controller.py b/wavefront/server/modules/agents_module/agents_module/controllers/agent_controller.py new file mode 100644 index 00000000..aae8268b --- /dev/null +++ b/wavefront/server/modules/agents_module/agents_module/controllers/agent_controller.py @@ -0,0 +1,439 @@ +from uuid import UUID +from fastapi import APIRouter, Depends, status, Path, Request, Query +from fastapi.responses import JSONResponse +from dependency_injector.wiring import inject, Provide + +from common_module.log.logger import logger +from common_module.response_formatter import ResponseFormatter +from common_module.common_container import CommonContainer +from agents_module.agents_container import AgentsContainer +from agents_module.services.agent_inference_service import AgentInferenceService +from agents_module.services.agent_crud_service import AgentCrudService +from db_repo_module.models.llm_inference_config import LlmInferenceConfig +from llm_inference_config_module.container import LlmInferenceConfigContainer +from agents_module.models.agent_schemas import ( + AgentInferenceRequest, + AgentInferenceResponse, +) +from agents_module.utils.input_processing_utils import process_inference_inputs +from agents_module.utils.auth_utils import extract_auth_credentials +from llm_inference_config_module.services.llm_inference_config_service import ( + LlmInferenceConfigService, +) +from tools_module.registry.tool_loader import ToolLoader + +agents_router = APIRouter() + + +@agents_router.post( + '/v1/agents/{namespace}/{agent_id}/inference', response_model=AgentInferenceResponse +) +@inject +async def agent_inference( + request: Request, + namespace: str = Path(..., description='The namespace of the agent'), + agent_id: str = Path(..., description='The ID of the agent to run inference with'), + agent_inference_payload: AgentInferenceRequest = ..., + agent_inference_service: AgentInferenceService = Depends( + Provide[AgentsContainer.agent_inference_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + llm_inference_config_service: LlmInferenceConfigService = Depends( + 
Provide[LlmInferenceConfigContainer.llm_inference_config_service] + ), +): + """ + Run inference using a flo_ai agent + + This endpoint: + 1. Fetches the agent YAML configuration from cloud storage using namespace and agent_id as key (agents/{namespace}/{agent_id}.yaml) + 2. Creates an agent instance from the YAML using flo_ai.AgentBuilder + 3. Runs inference with the provided variables + 4. Returns the result along with execution metadata + + Args: + namespace: The namespace of the agent + agent_id: The unique identifier for the agent + agent_inference_payload: AgentInferenceRequest containing variables for the agent + + Returns: + AgentInferenceResponse: Contains the inference result and metadata + + """ + logger.info(f'Starting inference for namespace: {namespace}, agent_id: {agent_id}') + + # Extract authentication credentials + access_token, app_key = extract_auth_credentials(request) + + # Fetch LLM config if provided + llm_config = None + if agent_inference_payload.llm_inference_config_id: + llm_config_dict = await llm_inference_config_service.get_config( + agent_inference_payload.llm_inference_config_id + ) + if not llm_config_dict: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'LLM inference configuration not found: {agent_inference_payload.llm_inference_config_id}' + ), + ) + else: + llm_config = LlmInferenceConfig(**llm_config_dict) + + # Process inputs using common utility function + resolved_inputs = process_inference_inputs(agent_inference_payload.inputs) + + # Perform the complete inference workflow + result, execution_time = await agent_inference_service.perform_inference( + agent_id=agent_id, + namespace=namespace, + variables=agent_inference_payload.variables or {}, + inputs=resolved_inputs, + llm_config=llm_config, + output_json_enabled=agent_inference_payload.output_json_enabled, + access_token=access_token, + app_key=app_key, + ) + + response_data = AgentInferenceResponse( + result=result[-1].content, + agent_id=agent_id, + namespace=namespace, + execution_time=execution_time, + ) + + logger.info( + f'Successfully completed inference for namespace: {namespace}, agent_id: {agent_id}' + ) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Agent inference completed successfully', + 'data': response_data.model_dump(), + } + ), + ) + + +@agents_router.post( + '/v2/agents/{agent_id}/inference', response_model=AgentInferenceResponse +) +@inject +async def agent_inference_v2( + request: Request, + agent_inference_payload: AgentInferenceRequest, + agent_id: UUID = Path( + ..., description='The UUID of the agent to run inference with' + ), + agent_inference_service: AgentInferenceService = Depends( + Provide[AgentsContainer.agent_inference_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + llm_inference_config_service: LlmInferenceConfigService = Depends( + Provide[LlmInferenceConfigContainer.llm_inference_config_service] + ), +): + """ + Run inference using a flo_ai agent (v2 - UUID-based) + + This endpoint: + 1. Fetches the agent from DB by UUID + 2. Retrieves YAML configuration from cloud storage + 3. Creates an agent instance from the YAML using flo_ai.AgentBuilder + 4. Runs inference with the provided variables + 5. 
Returns the result along with execution metadata + + Args: + agent_id: The UUID of the agent + request: Request containing variables for the agent + + Returns: + AgentInferenceResponse: Contains the inference result and metadata + """ + logger.info(f'Starting v2 inference for agent_id: {agent_id}') + + # Extract authentication credentials + access_token, app_key = extract_auth_credentials(request) + + # Fetch LLM config if provided + llm_config = None + if agent_inference_payload.llm_inference_config_id: + llm_config_dict = await llm_inference_config_service.get_config( + agent_inference_payload.llm_inference_config_id + ) + if not llm_config_dict: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'LLM inference configuration not found: {agent_inference_payload.llm_inference_config_id}' + ), + ) + else: + llm_config = LlmInferenceConfig(**llm_config_dict) + + # Process inputs using common utility function + resolved_inputs = process_inference_inputs(agent_inference_payload.inputs) + + try: + # Perform the complete inference workflow (v2) + ( + result, + execution_time, + namespace, + ) = await agent_inference_service.perform_inference_v2( + agent_id=agent_id, + variables=agent_inference_payload.variables or {}, + inputs=resolved_inputs + if isinstance(resolved_inputs, list) + else [resolved_inputs], + llm_config=llm_config, + output_json_enabled=agent_inference_payload.output_json_enabled, + access_token=access_token, + app_key=app_key, + ) + except ValueError as e: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse(str(e)), + ) + + response_data = AgentInferenceResponse( + result=result[-1].content, + agent_id=str(agent_id), + namespace=namespace, + execution_time=execution_time, + ) + + logger.info(f'Successfully completed v2 inference for agent_id: {agent_id}') + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Agent inference completed successfully', + 'data': response_data.model_dump(), + } + ), + ) + + +@agents_router.post('/v1/agent-management/agents/{name}') +@inject +async def create_agent( + request: Request, + name: str = Path(..., description='The name of the agent to create'), + namespace: str = Query('default', description='The namespace for the agent'), + agent_crud_service: AgentCrudService = Depends( + Provide[AgentsContainer.agent_crud_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + tool_loader: ToolLoader = Depends(Provide[AgentsContainer.tool_loader]), +): + """ + Create a new agent + + Args: + name: The agent name (unique globally) + namespace: The namespace (defaults to 'default', created if doesn't exist) + request: Request containing raw YAML content as text/plain + + Returns: + JSONResponse: Success or error response with agent details + """ + logger.info(f'Creating agent - namespace: {namespace}, name: {name}') + + # Extract authentication credentials + access_token, app_key = extract_auth_credentials(request) + + # Read raw YAML content from request body + yaml_content = (await request.body()).decode('utf-8') + + agent = await agent_crud_service.create_agent( + name=name, + namespace=namespace, + yaml_content=yaml_content, + tool_available=tool_loader.load_all_tools(), + access_token=access_token, + app_key=app_key, + ) + + logger.info(f'Successfully created agent - namespace: {namespace}, 
name: {name}') + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Agent created successfully', + 'data': agent, + } + ), + ) + + +@agents_router.get('/v1/agent-management/agents/{agent_id}') +@inject +async def get_agent( + agent_id: UUID = Path(..., description='The UUID of the agent to retrieve'), + agent_crud_service: AgentCrudService = Depends( + Provide[AgentsContainer.agent_crud_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + Get agent by UUID with YAML configuration + + Args: + agent_id: The agent UUID + + Returns: + JSONResponse: Agent details including YAML content + """ + logger.info(f'Getting agent by ID: {agent_id}') + + agent = await agent_crud_service.get_agent(agent_id) + + logger.info(f'Successfully retrieved agent - ID: {agent_id}') + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Agent retrieved successfully', + 'data': agent, + } + ), + ) + + +@agents_router.put('/v1/agent-management/agents/{agent_id}') +@inject +async def update_agent( + request: Request, + agent_id: UUID = Path(..., description='The UUID of the agent to update'), + agent_crud_service: AgentCrudService = Depends( + Provide[AgentsContainer.agent_crud_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + tool_loader: ToolLoader = Depends(Provide[AgentsContainer.tool_loader]), +): + """ + Update existing agent YAML configuration + + Args: + agent_id: The agent UUID + request: Request containing raw YAML content as text/plain + + Returns: + JSONResponse: Success or error response with updated agent details + """ + logger.info(f'Updating agent - ID: {agent_id}') + + # Extract authentication credentials + access_token, app_key = extract_auth_credentials(request) + + # Read raw YAML content from request body + yaml_content = (await request.body()).decode('utf-8') + + agent = await agent_crud_service.update_agent( + agent_id=agent_id, + yaml_content=yaml_content, + tool_available=tool_loader.load_all_tools(), + access_token=access_token, + app_key=app_key, + ) + + logger.info(f'Successfully updated agent - ID: {agent_id}') + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Agent updated successfully', + 'data': agent, + } + ), + ) + + +@agents_router.get('/v1/agent-management/agents') +@inject +async def list_agents( + namespace: str | None = Query( + None, description='Optional namespace to filter agents' + ), + agent_crud_service: AgentCrudService = Depends( + Provide[AgentsContainer.agent_crud_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + List agents with optional namespace filtering + + Args: + namespace: Optional namespace to filter agents (returns all if not provided) + + Returns: + JSONResponse: List of agents (without YAML content) + """ + logger.info(f'Listing agents - namespace filter: {namespace}') + + agents_list = await agent_crud_service.list_agents(namespace=namespace) + + logger.info(f'Successfully retrieved {len(agents_list)} agents') + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Agents retrieved successfully', + 'data': {'agents': agents_list, 'count': 
len(agents_list)},
+            }
+        ),
+    )
+
+
+@agents_router.delete('/v1/agent-management/agents/{agent_id}')
+@inject
+async def delete_agent(
+    agent_id: UUID = Path(..., description='The UUID of the agent to delete'),
+    agent_crud_service: AgentCrudService = Depends(
+        Provide[AgentsContainer.agent_crud_service]
+    ),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+):
+    """
+    Delete an agent by UUID
+
+    Args:
+        agent_id: The agent UUID
+
+    Returns:
+        JSONResponse: Success or error response
+    """
+    logger.info(f'Deleting agent - ID: {agent_id}')
+
+    await agent_crud_service.delete_agent(agent_id)
+
+    logger.info(f'Successfully deleted agent - ID: {agent_id}')
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {
+                'message': 'Agent deleted successfully',
+                'data': {'agent_id': str(agent_id)},
+            }
+        ),
+    )
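Taken together, these routes cover the agent lifecycle. A minimal client sketch follows; the host is illustrative, auth headers are omitted, the request fields mirror how AgentInferenceRequest is used above, and it assumes the v1 inference route is keyed by the same identifier used at creation:

    import requests

    BASE = 'http://localhost:8000'  # illustrative host

    # Create an agent from raw YAML (the body is plain-text YAML, per create_agent)
    with open('my_agent.yaml') as f:
        requests.post(
            f'{BASE}/v1/agent-management/agents/my-agent',
            params={'namespace': 'default'},
            data=f.read(),
        )

    # Run inference against it
    resp = requests.post(
        f'{BASE}/v1/agents/default/my-agent/inference',
        json={'inputs': ['Summarize: ...'], 'variables': {'tone': 'formal'}},
    )
    print(resp.json())

diff --git a/wavefront/server/modules/agents_module/agents_module/controllers/namespace_controller.py b/wavefront/server/modules/agents_module/agents_module/controllers/namespace_controller.py
new file mode 100644
index 00000000..b37698ec
--- /dev/null
+++ b/wavefront/server/modules/agents_module/agents_module/controllers/namespace_controller.py
@@ -0,0 +1,43 @@
+from fastapi import APIRouter, Depends, status
+from fastapi.responses import JSONResponse
+from dependency_injector.wiring import inject, Provide
+
+from common_module.log.logger import logger
+from common_module.response_formatter import ResponseFormatter
+from common_module.common_container import CommonContainer
+from agents_module.agents_container import AgentsContainer
+from agents_module.services.namespace_service import NamespaceService
+
+namespace_router = APIRouter()
+
+
+@namespace_router.get('/v1/namespaces')
+@inject
+async def list_namespaces(
+    namespace_service: NamespaceService = Depends(
+        Provide[AgentsContainer.namespace_service]
+    ),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+):
+    """
+    List all namespaces
+
+    Returns:
+        JSONResponse: List of all namespaces
+    """
+    logger.info('Listing all namespaces')
+
+    namespaces = await namespace_service.list_namespaces()
+
+    logger.info(f'Successfully retrieved {len(namespaces)} namespaces')
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {
+                'message': 'Namespaces retrieved successfully',
+                'data': {'namespaces': namespaces, 'count': len(namespaces)},
+            }
+        ),
+    )
diff --git a/wavefront/server/modules/agents_module/agents_module/controllers/workflow_controller.py b/wavefront/server/modules/agents_module/agents_module/controllers/workflow_controller.py
new file mode 100644
index 00000000..47557bd0
--- /dev/null
+++ b/wavefront/server/modules/agents_module/agents_module/controllers/workflow_controller.py
@@ -0,0 +1,673 @@
+from uuid import UUID
+from agents_module.utils.input_processing_utils import process_inference_inputs
+from agents_module.utils.auth_utils import extract_auth_credentials
+from fastapi import APIRouter, Depends, status, Path, Request, Query
+from fastapi.responses import JSONResponse, StreamingResponse
+from dependency_injector.wiring import inject, Provide
+import json
+import asyncio
+
+from common_module.log.logger import logger
+from common_module.response_formatter import ResponseFormatter
+from common_module.common_container import CommonContainer
+from agents_module.agents_container import AgentsContainer
+from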
agents_module.services.workflow_crud_service import WorkflowCrudService +from agents_module.services.workflow_inference_service import WorkflowInferenceService +from agents_module.services.workflow_events import ( + event_streamer, + create_workflow_event_callback, + DEFAULT_EVENTS_FILTER, +) +from agents_module.models.workflow_schemas import ( + WorkflowInferenceRequest, + WorkflowInferenceResponse, +) +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from db_repo_module.models.workflow_pipeline import WorkflowPipeline + +workflows_router = APIRouter() + + +@workflows_router.post( + '/v1/workflows/{namespace}/{workflow_id}/inference', + response_model=WorkflowInferenceResponse, +) +@inject +async def workflow_inference( + request: Request, + namespace: str = Path(..., description='The namespace of the workflow'), + workflow_id: str = Path( + ..., description='The ID of the workflow to run inference with' + ), + request_body: WorkflowInferenceRequest = ..., + listen_events: bool = Query( + False, description='Enable real-time event streaming via WebSocket' + ), + workflow_inference_service: WorkflowInferenceService = Depends( + Provide[AgentsContainer.workflow_inference_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + Run inference using a flo_ai workflow with optional real-time event streaming + + This endpoint: + 1. Fetches the workflow YAML configuration from cloud storage using namespace and workflow_id as key (workflows/{namespace}/{workflow_id}.yaml) + 2. Creates a workflow instance from the YAML using flo_ai.AriumBuilder + 3. Runs inference with the provided variables + 4. Optionally streams real-time events to connected WebSocket clients + 5. 
Returns the result along with execution metadata + + Args: + request: FastAPI request object for extracting user_id + namespace: The namespace of the workflow + workflow_id: The unique identifier for the workflow + request_body: Request containing variables and inputs for the workflow + listen_events: Whether to enable real-time event streaming + + Returns: + WorkflowInferenceResponse: Contains the inference result and metadata + + """ + logger.info( + f'Starting inference for namespace: {namespace}, workflow_id: {workflow_id}, listen_events: {listen_events}' + ) + + # Extract user_id from authenticated session + user_id = request.state.session.user_id + + # Extract authentication credentials + access_token, app_key = extract_auth_credentials(request) + + resolved_inputs = process_inference_inputs(request_body.inputs) + logger.info(f'Inputs to workflow: {resolved_inputs}') + + # Prepare event streaming if requested + event_callback = None + events_filter = None + + if listen_events or request_body.listen_events: + event_callback = create_workflow_event_callback(user_id, namespace, workflow_id) + events_filter = DEFAULT_EVENTS_FILTER + logger.info( + f'Event streaming enabled for user {user_id}, workflow {namespace}/{workflow_id}' + ) + + # Check if streaming is requested + if listen_events or request_body.listen_events: + logger.info( + f'Streaming inference for user {user_id}, workflow {namespace}/{workflow_id}' + ) + + # Get or create event queue for this user-workflow + event_queue = event_streamer.get_or_create_queue( + user_id, namespace, workflow_id + ) + + async def generate_inference_stream(): + """Generate streaming inference with events and final output""" + try: + # Start inference in background task + inference_task = asyncio.create_task( + workflow_inference_service.perform_inference( + workflow_name=workflow_id, + namespace=namespace, + variables=request_body.variables or {}, + inputs=resolved_inputs + if isinstance(resolved_inputs, list) + else [resolved_inputs], + output_json_enabled=request_body.output_json_enabled, + event_callback=event_callback, + events_filter=events_filter, + access_token=access_token, + app_key=app_key, + ) + ) + + # Stream events while workflow is running + workflow_completed = False + while not workflow_completed and not inference_task.done(): + try: + # Wait for event with timeout + event_data = await asyncio.wait_for( + event_queue.get(), timeout=1.0 + ) + yield f'data: {json.dumps(event_data)}\n\n' + await asyncio.sleep(0.1) # remove it later + + # Check if workflow ended + if event_data.get('event_type') in [ + 'workflow_completed', + 'workflow_failed', + ]: + workflow_completed = True + + except asyncio.TimeoutError: + # Continue waiting if no events + continue + + # Wait for inference to complete and get result + result, execution_time = await inference_task + + # Send final output event + output_event = { + 'event_type': 'output', + 'result': result, + 'workflow_id': workflow_id, + 'namespace': namespace, + 'execution_time': execution_time, + 'timestamp': asyncio.get_event_loop().time(), + } + yield f'data: {json.dumps(output_event)}\n\n' + await asyncio.sleep(0.1) # remove it later + + logger.info( + f'Streaming inference completed for user {user_id}, workflow {namespace}/{workflow_id}' + ) + + except Exception as e: + logger.error( + f'Error in streaming inference for user {user_id}, workflow {namespace}/{workflow_id}: {e}' + ) + error_event = { + 'event_type': 'error', + 'error': str(e), + 'timestamp': asyncio.get_event_loop().time(), + } 
+ yield f'data: {json.dumps(error_event)}\n\n' + finally: + # Clean up queue + event_streamer.cleanup_queue(user_id, namespace, workflow_id) + + return StreamingResponse( + generate_inference_stream(), + media_type='text/event-stream', + headers={ + 'Cache-Control': 'no-cache', + 'Connection': 'keep-alive', + 'Content-Type': 'text/event-stream', + 'Transfer-Encoding': 'chunked', + 'X-Accel-Buffering': 'no', # Disable nginx buffering + }, + ) + + else: + # Non-streaming mode - normal JSON response + result, execution_time = await workflow_inference_service.perform_inference( + workflow_name=workflow_id, + namespace=namespace, + variables=request_body.variables or {}, + inputs=resolved_inputs + if isinstance(resolved_inputs, list) + else [resolved_inputs], + output_json_enabled=request_body.output_json_enabled, + event_callback=event_callback, + events_filter=events_filter, + access_token=access_token, + app_key=app_key, + ) + + response_data = WorkflowInferenceResponse( + result=result, + workflow_id=workflow_id, + namespace=namespace, + execution_time=execution_time, + ) + + logger.info( + f'Successfully completed inference for namespace: {namespace}, workflow_id: {workflow_id}' + ) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Workflow inference completed successfully', + 'data': response_data.model_dump(), + } + ), + ) + + +@workflows_router.post( + '/v2/workflows/{workflow_id}/inference', + response_model=WorkflowInferenceResponse, +) +@inject +async def workflow_inference_v2( + request: Request, + workflow_id: UUID = Path( + ..., description='The UUID of the workflow to run inference with' + ), + request_body: WorkflowInferenceRequest = ..., + listen_events: bool = Query( + False, description='Enable real-time event streaming via WebSocket' + ), + workflow_inference_service: WorkflowInferenceService = Depends( + Provide[AgentsContainer.workflow_inference_service] + ), + workflow_crud_service: WorkflowCrudService = Depends( + Provide[AgentsContainer.workflow_crud_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + Run inference using a flo_ai workflow with optional real-time event streaming (v2 - UUID-based) + + This endpoint: + 1. Fetches the workflow from DB by UUID + 2. Retrieves YAML configuration from cloud storage + 3. Creates a workflow instance from the YAML using flo_ai.AriumBuilder + 4. Runs inference with the provided variables + 5. Optionally streams real-time events to connected WebSocket clients + 6. 
Returns the result along with execution metadata + + Args: + request: FastAPI request object for extracting user_id + workflow_id: The UUID of the workflow + request_body: Request containing variables and inputs for the workflow + listen_events: Whether to enable real-time event streaming + + Returns: + WorkflowInferenceResponse: Contains the inference result and metadata + """ + logger.info( + f'Starting v2 inference for workflow_id: {workflow_id}, listen_events: {listen_events}' + ) + + # Extract user_id from authenticated session + user_id = request.state.session.user_id + + # Extract authentication credentials + access_token, app_key = extract_auth_credentials(request) + + # Fetch workflow from DB first to get namespace and name + workflow_data = await workflow_crud_service.get_workflow(workflow_id) + namespace = workflow_data['namespace'] + workflow_name = workflow_data['name'] + + resolved_inputs = process_inference_inputs(request_body.inputs) + logger.info(f'Inputs to workflow: {resolved_inputs}') + + # Prepare event streaming if requested + event_callback = None + events_filter = None + + if listen_events or request_body.listen_events: + # Use real namespace and workflow name for event streaming + event_callback = create_workflow_event_callback( + user_id, namespace, workflow_name + ) + events_filter = DEFAULT_EVENTS_FILTER + logger.info( + f'Event streaming enabled for user {user_id}, workflow {namespace}/{workflow_name}' + ) + + # Check if streaming is requested + if listen_events or request_body.listen_events: + logger.info( + f'Streaming inference for user {user_id}, workflow {namespace}/{workflow_name}' + ) + + # Get or create event queue for this user-workflow + event_queue = event_streamer.get_or_create_queue( + user_id, namespace, workflow_name + ) + + async def generate_inference_stream(): + """Generate streaming inference with events and final output""" + try: + # Start inference in background task + inference_task = asyncio.create_task( + workflow_inference_service.perform_inference_v2( + workflow_data=workflow_data, + variables=request_body.variables or {}, + inputs=resolved_inputs + if isinstance(resolved_inputs, list) + else [resolved_inputs], + output_json_enabled=request_body.output_json_enabled, + event_callback=event_callback, + events_filter=events_filter, + access_token=access_token, + app_key=app_key, + ) + ) + + # Stream events while workflow is running + workflow_completed = False + while not workflow_completed and not inference_task.done(): + try: + # Wait for event with timeout + event_data = await asyncio.wait_for( + event_queue.get(), timeout=1.0 + ) + yield f'data: {json.dumps(event_data)}\n\n' + await asyncio.sleep(0.1) # remove it later + + # Check if workflow ended + if event_data.get('event_type') in [ + 'workflow_completed', + 'workflow_failed', + ]: + workflow_completed = True + + except asyncio.TimeoutError: + # Continue waiting if no events + continue + + # Wait for inference to complete and get result + result, execution_time = await inference_task + + # Send final output event + output_event = { + 'event_type': 'output', + 'result': result, + 'workflow_id': workflow_name, + 'namespace': namespace, + 'execution_time': execution_time, + 'timestamp': asyncio.get_event_loop().time(), + } + yield f'data: {json.dumps(output_event)}\n\n' + await asyncio.sleep(0.1) # remove it later + + logger.info( + f'Streaming inference completed for user {user_id}, workflow {namespace}/{workflow_name}' + ) + + except ValueError as e: + logger.error(f'Error in 
streaming inference: {e}') + error_event = { + 'event_type': 'error', + 'error': str(e), + 'timestamp': asyncio.get_event_loop().time(), + } + yield f'data: {json.dumps(error_event)}\n\n' + except Exception as e: + logger.error(f'Error in streaming inference: {e}') + error_event = { + 'event_type': 'error', + 'error': str(e), + 'timestamp': asyncio.get_event_loop().time(), + } + yield f'data: {json.dumps(error_event)}\n\n' + finally: + # Clean up queue + event_streamer.cleanup_queue(user_id, namespace, workflow_name) + + return StreamingResponse( + generate_inference_stream(), + media_type='text/event-stream', + headers={ + 'Cache-Control': 'no-cache', + 'Connection': 'keep-alive', + 'Content-Type': 'text/event-stream', + 'Transfer-Encoding': 'chunked', + 'X-Accel-Buffering': 'no', # Disable nginx buffering + }, + ) + + else: + # Non-streaming mode - normal JSON response + result, execution_time = await workflow_inference_service.perform_inference_v2( + workflow_data=workflow_data, + variables=request_body.variables or {}, + inputs=resolved_inputs + if isinstance(resolved_inputs, list) + else [resolved_inputs], + output_json_enabled=request_body.output_json_enabled, + event_callback=event_callback, + events_filter=events_filter, + access_token=access_token, + app_key=app_key, + ) + + response_data = WorkflowInferenceResponse( + result=result, + workflow_id=workflow_name, + namespace=namespace, + execution_time=execution_time, + ) + + logger.info( + f'Successfully completed v2 inference for workflow {namespace}/{workflow_name}' + ) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Workflow inference completed successfully', + 'data': response_data.model_dump(), + } + ), + ) + + +@workflows_router.post('/v1/workflow-management/workflows/{name}') +@inject +async def create_workflow( + request: Request, + name: str = Path(..., description='The name of the workflow to create'), + namespace: str = Query('default', description='The namespace for the workflow'), + workflow_crud_service: WorkflowCrudService = Depends( + Provide[AgentsContainer.workflow_crud_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + Create a new workflow + + Args: + name: The workflow name (unique globally) + namespace: The namespace (defaults to 'default', created if doesn't exist) + request: Request containing raw YAML content as text/plain + + Returns: + JSONResponse: Success or error response with workflow details + """ + logger.info(f'Creating workflow - namespace: {namespace}, name: {name}') + + # Extract authentication credentials + access_token, app_key = extract_auth_credentials(request) + + # Read raw YAML content from request body + yaml_content = (await request.body()).decode('utf-8') + + workflow = await workflow_crud_service.create_workflow( + name=name, + namespace=namespace, + yaml_content=yaml_content, + access_token=access_token, + app_key=app_key, + ) + + logger.info(f'Successfully created workflow - namespace: {namespace}, name: {name}') + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Workflow created successfully', + 'data': workflow, + } + ), + ) + + +@workflows_router.get('/v1/workflow-management/workflows/{workflow_id}') +@inject +async def get_workflow( + workflow_id: UUID = Path(..., description='The UUID of the workflow to retrieve'), + workflow_crud_service: 
WorkflowCrudService = Depends( + Provide[AgentsContainer.workflow_crud_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + Get workflow by UUID with YAML configuration + + Args: + workflow_id: The workflow UUID + + Returns: + JSONResponse: Workflow details including YAML content + """ + logger.info(f'Getting workflow by ID: {workflow_id}') + + workflow = await workflow_crud_service.get_workflow(workflow_id) + + logger.info(f'Successfully retrieved workflow - ID: {workflow_id}') + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Workflow retrieved successfully', + 'data': workflow, + } + ), + ) + + +@workflows_router.put('/v1/workflow-management/workflows/{workflow_id}') +@inject +async def update_workflow( + request: Request, + workflow_id: UUID = Path(..., description='The UUID of the workflow to update'), + workflow_crud_service: WorkflowCrudService = Depends( + Provide[AgentsContainer.workflow_crud_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + Update existing workflow YAML configuration + + Args: + workflow_id: The workflow UUID + request: Request containing raw YAML content as text/plain + + Returns: + JSONResponse: Success or error response with updated workflow details + """ + logger.info(f'Updating workflow - ID: {workflow_id}') + + # Extract authentication credentials + access_token, app_key = extract_auth_credentials(request) + + # Read raw YAML content from request body + yaml_content = (await request.body()).decode('utf-8') + + workflow = await workflow_crud_service.update_workflow( + workflow_id=workflow_id, + yaml_content=yaml_content, + access_token=access_token, + app_key=app_key, + ) + + logger.info(f'Successfully updated workflow - ID: {workflow_id}') + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Workflow updated successfully', + 'data': workflow, + } + ), + ) + + +@workflows_router.get('/v1/workflow-management/workflows') +@inject +async def list_workflows( + namespace: str | None = Query( + None, description='Optional namespace to filter workflows' + ), + workflow_crud_service: WorkflowCrudService = Depends( + Provide[AgentsContainer.workflow_crud_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + List workflows with optional namespace filtering + + Args: + namespace: Optional namespace to filter workflows (returns all if not provided) + + Returns: + JSONResponse: List of workflows (without YAML content) + """ + logger.info(f'Listing workflows - namespace filter: {namespace}') + + workflows_list = await workflow_crud_service.list_workflows(namespace=namespace) + + logger.info(f'Successfully retrieved {len(workflows_list)} workflows') + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Workflows retrieved successfully', + 'data': {'workflows': workflows_list, 'count': len(workflows_list)}, + } + ), + ) + + +@workflows_router.delete('/v1/workflow-management/workflows/{workflow_id}') +@inject +async def delete_workflow( + workflow_id: UUID = Path(..., description='The UUID of the workflow to delete'), + workflow_crud_service: WorkflowCrudService = Depends( + Provide[AgentsContainer.workflow_crud_service] + ), + 
workflow_pipeline_repository: SQLAlchemyRepository[WorkflowPipeline] = Depends(
+        Provide[AgentsContainer.workflow_pipeline_repository]
+    ),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+):
+    """
+    Delete a workflow by UUID
+
+    Args:
+        workflow_id: The workflow UUID
+
+    Returns:
+        JSONResponse: Success or error response
+    """
+    logger.info(f'Deleting workflow - ID: {workflow_id}')
+
+    # Check if there are any workflow pipelines associated with this workflow
+    workflow_pipeline = await workflow_pipeline_repository.find(workflow_id=workflow_id)
+
+    if len(workflow_pipeline) > 0:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                'Delete workflow pipelines associated with this workflow first'
+            ),
+        )
+
+    # No pipelines found, proceed with deletion
+    await workflow_crud_service.delete_workflow(workflow_id)
+
+    logger.info(f'Successfully deleted workflow - ID: {workflow_id}')
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {
+                'message': 'Workflow deleted successfully',
+                'data': {'workflow_id': str(workflow_id)},
+            }
+        ),
+    )
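When listen_events is set, the inference routes above respond as a server-sent-event stream of `data: <json>` frames, ending with an 'output' event that carries the final result. A minimal consumer sketch (illustrative host; the JSON fields mirror how WorkflowInferenceRequest is used above):

    import json
    import requests

    # Illustrative host and workflow; listen_events=true selects the SSE branch.
    url = 'http://localhost:8000/v1/workflows/default/my-workflow/inference'
    with requests.post(
        url,
        params={'listen_events': 'true'},
        json={'inputs': ['Hello'], 'variables': {}},
        stream=True,
    ) as resp:
        for line in resp.iter_lines():
            if not line.startswith(b'data: '):
                continue  # skip keep-alive blanks
            event = json.loads(line[len(b'data: '):])
            if event.get('event_type') == 'output':
                print(event['result'])  # final workflow output
                break

diff --git a/wavefront/server/modules/agents_module/agents_module/controllers/workflow_pipeline_controller.py b/wavefront/server/modules/agents_module/agents_module/controllers/workflow_pipeline_controller.py
new file mode 100644
index 00000000..85d2da90
--- /dev/null
+++ b/wavefront/server/modules/agents_module/agents_module/controllers/workflow_pipeline_controller.py
@@ -0,0 +1,323 @@
+from fastapi import APIRouter, Depends
+from dependency_injector.wiring import inject, Provide
+
+from agents_module.agents_container import AgentsContainer
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from db_repo_module.models.workflow_pipeline import WorkflowPipeline
+from db_repo_module.models.workflow_runs import WorkflowRuns
+from db_repo_module.models.workflow import Workflow
+from common_module.response_formatter import ResponseFormatter
+from common_module.common_container import CommonContainer
+from fastapi import status
+from fastapi.responses import JSONResponse
+from pydantic import BaseModel, Field
+from typing import Optional, List, Dict, Any
+from flo_cloud.message_queue import MessageQueueManager
+from datetime import datetime
+from flo_utils.constants.workflow import WorkflowStatus
+import uuid
+
+workflow_pipeline_router = APIRouter(prefix='/v1')
+
+
+class CreateWorkflowPipelinePayload(BaseModel):
+    name: str
+    description: Optional[str] = None
+    workflow_id: uuid.UUID
+    retry_policy: Optional[str] = None
+    timeout: Optional[int] = None
+    concurrency_limit: Optional[int] = None
+
+
+class UpdateWorkflowPipelinePayload(BaseModel):
+    name: Optional[str] = None
+    description: Optional[str] = None
+    workflow_id: Optional[uuid.UUID] = None
+    retry_policy: Optional[str] = None
+    timeout: Optional[int] = None
+    concurrency_limit: Optional[int] = None
+
+
+class WorkflowMessage(BaseModel):
+    inputs: List[dict] | str = Field(
+        ...,
+        description='Inputs to use for inference',
+        example=[
+            'Process the following text: with '
+        ],
+    )
+    variables: Optional[Dict[str, Any]] = Field(
+        default=None,
+        description='Variables to pass to the workflow during inference',
+        example={
+            'target_language': 'Spanish',
+            'tone': 'formal',
+            'text_to_process': 'Welcome to our application',
+        },
+    )
+
+
+class SubmitWorkflowPipelinePayload(BaseModel):
+    pipeline_job: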
WorkflowMessage + + +@workflow_pipeline_router.post('/workflow-pipelines') +@inject +async def create_workflow_pipeline( + create_workflow_pipeline_payload: CreateWorkflowPipelinePayload, + workflow_pipeline_repository: SQLAlchemyRepository[WorkflowPipeline] = Depends( + Provide[AgentsContainer.workflow_pipeline_repository] + ), + workflow_repository: SQLAlchemyRepository[Workflow] = Depends( + Provide[AgentsContainer.workflow_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + # Verify workflow exists + workflow = await workflow_repository.find_one( + id=create_workflow_pipeline_payload.workflow_id + ) + if not workflow: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Workflow not found with ID: {create_workflow_pipeline_payload.workflow_id}' + ), + ) + + created_workflow_pipeline = await workflow_pipeline_repository.create( + name=create_workflow_pipeline_payload.name, + description=create_workflow_pipeline_payload.description, + workflow_id=create_workflow_pipeline_payload.workflow_id, + retry_policy=create_workflow_pipeline_payload.retry_policy, + timeout=create_workflow_pipeline_payload.timeout, + concurrency_limit=create_workflow_pipeline_payload.concurrency_limit, + ) + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Workflow pipeline created successfully', + 'workflow_pipeline_id': str(created_workflow_pipeline.id), + } + ), + ) + + +@workflow_pipeline_router.post('/workflow-pipelines/{workflow_pipeline_id}/submit') +@inject +async def submit_workflow_to_pipeline( + workflow_pipeline_id: str, + submit_workflow_pipeline_payload: SubmitWorkflowPipelinePayload, + workflow_pipeline_repository: SQLAlchemyRepository[WorkflowPipeline] = Depends( + Provide[AgentsContainer.workflow_pipeline_repository] + ), + workflow_run_repository: SQLAlchemyRepository[WorkflowRuns] = Depends( + Provide[AgentsContainer.workflow_runs_repository] + ), + workflow_repository: SQLAlchemyRepository[Workflow] = Depends( + Provide[AgentsContainer.workflow_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + message_queue_manager: MessageQueueManager = Depends( + Provide[AgentsContainer.message_queue_manager] + ), + config: dict[str, Any] = Depends(Provide[AgentsContainer.config]), +): + workflow_pipeline = await workflow_pipeline_repository.find_one( + id=workflow_pipeline_id + ) + + if not workflow_pipeline: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + 'Workflow pipeline not found' + ), + ) + + workflow = await workflow_repository.find_one(id=workflow_pipeline.workflow_id) + + if not workflow: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + 'Workflow not found for this pipeline' + ), + ) + + workflow_run = await workflow_run_repository.create( + workflow_pipeline_id=workflow_pipeline_id, + start_time=datetime.now(), + status=WorkflowStatus.INITIATED, + ) + + workflow_run_id = str(workflow_run.id) + + pipeline_job_payload = submit_workflow_pipeline_payload.model_dump(mode='json') + message_queue_manager.add_message( + message_body={ + 'workflow_run_id': workflow_run_id, + 'workflow_pipeline_id': workflow_pipeline_id, + 'pipeline_job': pipeline_job_payload['pipeline_job'], + 'workflow_data': 
workflow.to_dict(), + }, + topic_name_or_queue_url=config['workflow']['worker_topic'], + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Request submitted to workflow pipeline', + 'workflow_run_id': workflow_run_id, + } + ), + ) + + +@workflow_pipeline_router.get('/workflow-pipelines') +@inject +async def get_workflow_pipelines( + workflow_pipeline_repository: SQLAlchemyRepository[WorkflowPipeline] = Depends( + Provide[AgentsContainer.workflow_pipeline_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + workflow_pipelines = await workflow_pipeline_repository.find() + workflow_pipelines_list = [ + workflow_pipeline.to_dict() for workflow_pipeline in workflow_pipelines + ] + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'workflow_pipelines': workflow_pipelines_list} + ), + ) + + +@workflow_pipeline_router.get('/workflow-pipelines/{workflow_pipeline_id}') +@inject +async def get_workflow_pipeline( + workflow_pipeline_id: str, + workflow_pipeline_repository: SQLAlchemyRepository[WorkflowPipeline] = Depends( + Provide[AgentsContainer.workflow_pipeline_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + workflow_pipeline = await workflow_pipeline_repository.find_one( + id=workflow_pipeline_id + ) + if not workflow_pipeline: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + 'Workflow pipeline not found' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse(workflow_pipeline.to_dict()), + ) + + +@workflow_pipeline_router.put('/workflow-pipelines/{workflow_pipeline_id}') +@inject +async def update_workflow_pipeline( + workflow_pipeline_id: str, + update_workflow_pipeline_payload: UpdateWorkflowPipelinePayload, + workflow_pipeline_repository: SQLAlchemyRepository[WorkflowPipeline] = Depends( + Provide[AgentsContainer.workflow_pipeline_repository] + ), + workflow_repository: SQLAlchemyRepository[Workflow] = Depends( + Provide[AgentsContainer.workflow_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + existing_workflow_pipeline = await workflow_pipeline_repository.find_one( + id=workflow_pipeline_id + ) + if not existing_workflow_pipeline: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + 'Workflow pipeline not found' + ), + ) + + # If workflow_id is being updated, verify it exists + if update_workflow_pipeline_payload.workflow_id: + workflow = await workflow_repository.find_one( + id=update_workflow_pipeline_payload.workflow_id + ) + if not workflow: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Workflow not found with ID: {update_workflow_pipeline_payload.workflow_id}' + ), + ) + + await workflow_pipeline_repository.find_one_and_update( + filters={'id': workflow_pipeline_id}, + name=update_workflow_pipeline_payload.name, + description=update_workflow_pipeline_payload.description, + workflow_id=update_workflow_pipeline_payload.workflow_id, + retry_policy=update_workflow_pipeline_payload.retry_policy, + timeout=update_workflow_pipeline_payload.timeout, + 
concurrency_limit=update_workflow_pipeline_payload.concurrency_limit, + ) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Workflow pipeline updated successfully', + 'workflow_pipeline_id': workflow_pipeline_id, + } + ), + ) + + +@workflow_pipeline_router.delete('/workflow-pipelines/{workflow_pipeline_id}') +@inject +async def delete_workflow_pipeline( + workflow_pipeline_id: str, + workflow_pipeline_repository: SQLAlchemyRepository[WorkflowPipeline] = Depends( + Provide[AgentsContainer.workflow_pipeline_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + deleted_workflow_pipeline = await workflow_pipeline_repository.delete_all( + id=workflow_pipeline_id + ) + if not deleted_workflow_pipeline: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + 'Workflow pipeline not found' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Workflow pipeline deleted successfully', + 'workflow_pipeline_id': workflow_pipeline_id, + } + ), + ) diff --git a/wavefront/server/modules/agents_module/agents_module/controllers/workflow_runs.py b/wavefront/server/modules/agents_module/agents_module/controllers/workflow_runs.py new file mode 100644 index 00000000..5fc1dca2 --- /dev/null +++ b/wavefront/server/modules/agents_module/agents_module/controllers/workflow_runs.py @@ -0,0 +1,199 @@ +from fastapi import APIRouter, Depends, Query, status +from fastapi.responses import JSONResponse +from dependency_injector.wiring import inject, Provide +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from db_repo_module.models.workflow_runs import WorkflowRuns +from agents_module.agents_container import AgentsContainer +from pydantic import BaseModel, Field +from typing import Optional +from datetime import datetime +from sqlalchemy import select, desc +from common_module.response_formatter import ResponseFormatter +from common_module.common_container import CommonContainer + +workflow_runs_router = APIRouter(prefix='/v1') + + +class CreateWorkflowRunPayload(BaseModel): + workflow_pipeline_id: str + status: Optional[str] = None + start_time: datetime = Field(default_factory=datetime.now) + end_time: Optional[datetime] = None + error: Optional[str] = None + output: Optional[str] = None + + +class UpdateWorkflowRunPayload(BaseModel): + status: Optional[str] = None + end_time: Optional[datetime] = None + error: Optional[str] = None + output: Optional[str] = None + + +@workflow_runs_router.post('/workflow-runs') +@inject +async def create_workflow_run( + create_workflow_run_payload: CreateWorkflowRunPayload, + workflow_run_repository: SQLAlchemyRepository[WorkflowRuns] = Depends( + Provide[AgentsContainer.workflow_runs_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + created_workflow_run = await workflow_run_repository.create( + workflow_pipeline_id=create_workflow_run_payload.workflow_pipeline_id, + status=create_workflow_run_payload.status, + start_time=create_workflow_run_payload.start_time, + end_time=create_workflow_run_payload.end_time, + error=create_workflow_run_payload.error, + output=create_workflow_run_payload.output, + ) + return JSONResponse( + status_code=status.HTTP_201_CREATED, + 
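+        # Echo the generated run id (UUID rendered as str) so callers can
+        # poll or update the run through the endpoints defined below: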
content=response_formatter.buildSuccessResponse( + { + 'message': 'Workflow run created successfully', + 'workflow_run_id': str(created_workflow_run.id), + } + ), + ) + + +@workflow_runs_router.get('/workflow-runs') +@inject +async def get_workflow_runs( + workflow_pipeline_id: Optional[str] = None, + workflow_status: Optional[str] = None, + offset: Optional[int] = Query(0, ge=0, description='The number of items to skip'), + limit: Optional[int] = Query( + 100, ge=1, le=2000, description='The maximum number of items to return' + ), + workflow_run_repository: SQLAlchemyRepository[WorkflowRuns] = Depends( + Provide[AgentsContainer.workflow_runs_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + filters = {} + if workflow_pipeline_id: + filters['workflow_pipeline_id'] = workflow_pipeline_id + if workflow_status: + filters['status'] = workflow_status + + # Get paginated results using session to ensure offset is applied + async with workflow_run_repository.session() as session: + query = select(WorkflowRuns) + for key, value in filters.items(): + if isinstance(value, list): + query = query.where(getattr(WorkflowRuns, key).in_(value)) + else: + query = query.where(getattr(WorkflowRuns, key) == value) + query = query.order_by(desc(WorkflowRuns.end_time)).offset(offset).limit(limit) + result = await session.execute(query) + workflow_runs = result.scalars().all() + + workflow_runs_list = [workflow_run.to_dict() for workflow_run in workflow_runs] + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'workflow_runs': workflow_runs_list, + } + ), + ) + + +@workflow_runs_router.get('/workflow-runs/{workflow_run_id}') +@inject +async def get_workflow_run( + workflow_run_id: str, + workflow_run_repository: SQLAlchemyRepository[WorkflowRuns] = Depends( + Provide[AgentsContainer.workflow_runs_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + workflow_run = await workflow_run_repository.find_one(id=workflow_run_id) + if not workflow_run: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse('Workflow run not found'), + ) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'workflow_run': workflow_run.to_dict()} + ), + ) + + +@workflow_runs_router.put('/workflow-runs/{workflow_run_id}') +@inject +async def update_workflow_run( + workflow_run_id: str, + update_workflow_run_payload: UpdateWorkflowRunPayload, + workflow_run_repository: SQLAlchemyRepository[WorkflowRuns] = Depends( + Provide[AgentsContainer.workflow_runs_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + existing_workflow_run = await workflow_run_repository.find_one(id=workflow_run_id) + if not existing_workflow_run: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse('Workflow run not found'), + ) + + await workflow_run_repository.find_one_and_update( + filters={'id': workflow_run_id}, + **{ + 'status': update_workflow_run_payload.status, + 'end_time': update_workflow_run_payload.end_time, + 'error': update_workflow_run_payload.error, + 'output': update_workflow_run_payload.output, + }, + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + 
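+        # Only an acknowledgement is returned; callers can GET
+        # /workflow-runs/{workflow_run_id} to read the updated record: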
content=response_formatter.buildSuccessResponse( + { + 'message': 'Workflow run updated successfully', + 'workflow_run_id': workflow_run_id, + } + ), + ) + + +@workflow_runs_router.delete('/workflow-runs/{workflow_run_id}') +@inject +async def delete_workflow_run( + workflow_run_id: str, + workflow_run_repository: SQLAlchemyRepository[WorkflowRuns] = Depends( + Provide[AgentsContainer.workflow_runs_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + deleted_workflow_run = await workflow_run_repository.delete_all(id=workflow_run_id) + if not deleted_workflow_run: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse('Workflow run not found'), + ) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Workflow run deleted successfully', + 'workflow_run_id': workflow_run_id, + } + ), + ) diff --git a/wavefront/server/modules/agents_module/agents_module/models/agent_schemas.py b/wavefront/server/modules/agents_module/agents_module/models/agent_schemas.py new file mode 100644 index 00000000..38083ff4 --- /dev/null +++ b/wavefront/server/modules/agents_module/agents_module/models/agent_schemas.py @@ -0,0 +1,80 @@ +import uuid +from typing import Any, Dict, List, Optional +from pydantic import BaseModel, Field + + +class AgentInferenceRequest(BaseModel): + """Request model for agent inference""" + + variables: Dict[str, Any] | None = Field( + default=None, + description='Variables to pass to the agent during inference', + example={ + 'target_language': 'Spanish', + 'tone': 'formal', + 'text_to_translate': 'Welcome to our application', + }, + ) + inputs: List[dict | str] | str = Field( + ..., + description='Inputs to use for inference', + example=[ + 'Translate the following text: to ' + ], + ) + llm_inference_config_id: Optional[uuid.UUID] = Field( + default=None, + description="Optional ID of LLM inference configuration to override agent's default LLM", + ) + output_json_enabled: bool = Field( + default=True, + description='Whether to extract JSON from agent response. 
If False, returns raw string output.', + ) + tool_names: Optional[List[str]] = Field( + default=None, + description='Optional list of tool names to load and make available to the agent during inference', + example=['bigquery_test_connection', 'bigquery_fetch_data'], + ) + + +class AgentInferenceResponse(BaseModel): + """Response model for agent inference""" + + result: str = Field(..., description='The inference result from the agent') + agent_id: str = Field( + ..., description='The ID of the agent that performed the inference' + ) + namespace: str = Field( + ..., description='The namespace of the agent that performed the inference' + ) + execution_time: float = Field(..., description='Execution time in seconds') + + +class AgentResponse(BaseModel): + """Response model for single agent with YAML content""" + + id: uuid.UUID = Field(..., description='The unique UUID of the agent') + name: str = Field(..., description='The unique name of the agent') + namespace: str = Field(..., description='The namespace of the agent') + yaml_content: str = Field( + ..., description='YAML configuration content of the agent' + ) + created_at: str = Field(..., description='Creation timestamp in ISO format') + updated_at: str = Field(..., description='Last update timestamp in ISO format') + + +class AgentListItem(BaseModel): + """Response model for agent metadata in list operations (without YAML)""" + + id: uuid.UUID = Field(..., description='The unique UUID of the agent') + name: str = Field(..., description='The unique name of the agent') + namespace: str = Field(..., description='The namespace of the agent') + created_at: str = Field(..., description='Creation timestamp in ISO format') + updated_at: str = Field(..., description='Last update timestamp in ISO format') + + +class AgentsListResponse(BaseModel): + """Response model for listing multiple agents""" + + agents: List[AgentListItem] = Field(..., description='List of agent metadata') + count: int = Field(..., description='Total number of agents returned') diff --git a/wavefront/server/modules/agents_module/agents_module/models/workflow_schemas.py b/wavefront/server/modules/agents_module/agents_module/models/workflow_schemas.py new file mode 100644 index 00000000..d48fc8b6 --- /dev/null +++ b/wavefront/server/modules/agents_module/agents_module/models/workflow_schemas.py @@ -0,0 +1,106 @@ +import uuid +from typing import Any, Dict, List, Optional +from pydantic import BaseModel, Field + + +class WorkflowInferenceRequest(BaseModel): + """Request model for workflow inference""" + + variables: Dict[str, Any] | None = Field( + default=None, + description='Variables to pass to the workflow during inference', + example={ + 'target_language': 'Spanish', + 'tone': 'formal', + 'text_to_process': 'Welcome to our application', + }, + ) + inputs: List[dict | str] | str = Field( + ..., + description='Inputs to use for inference', + example=[ + 'Process the following text: with ' + ], + ) + output_json_enabled: bool = Field( + default=False, + description='Whether to extract JSON from workflow response. 
If False, returns raw string output.', + ) + listen_events: bool = Field( + default=False, + description='Whether to enable real-time event streaming via WebSocket during workflow execution.', + ) + + +class WorkflowInferenceResponse(BaseModel): + """Response model for workflow inference""" + + result: str | Dict = Field( + ..., description='The inference result from the workflow' + ) + workflow_id: str = Field( + ..., description='The ID of the workflow that performed the inference' + ) + namespace: str = Field( + ..., description='The namespace of the workflow that performed the inference' + ) + execution_time: float = Field(..., description='Execution time in seconds') + + +class WorkflowResponse(BaseModel): + """Response model for single workflow with YAML content""" + + id: uuid.UUID = Field(..., description='The unique UUID of the workflow') + name: str = Field(..., description='The unique name of the workflow') + namespace: str = Field(..., description='The namespace of the workflow') + yaml_content: str = Field( + ..., description='YAML configuration content of the workflow' + ) + created_at: str = Field(..., description='Creation timestamp in ISO format') + updated_at: str = Field(..., description='Last update timestamp in ISO format') + + +class WorkflowListItem(BaseModel): + """Response model for workflow metadata in list operations (without YAML)""" + + id: uuid.UUID = Field(..., description='The unique UUID of the workflow') + name: str = Field(..., description='The unique name of the workflow') + namespace: str = Field(..., description='The namespace of the workflow') + created_at: str = Field(..., description='Creation timestamp in ISO format') + updated_at: str = Field(..., description='Last update timestamp in ISO format') + + +class WorkflowsListResponse(BaseModel): + """Response model for listing multiple workflows""" + + workflows: List[WorkflowListItem] = Field( + ..., description='List of workflow metadata' + ) + count: int = Field(..., description='Total number of workflows returned') + + +class WorkflowEventMessage(BaseModel): + """Model for WebSocket workflow event messages""" + + event_type: str = Field(..., description='Type of workflow event') + timestamp: float = Field(..., description='Unix timestamp when the event occurred') + workflow_id: str = Field(..., description='ID of the workflow generating the event') + namespace: str = Field(..., description='Namespace of the workflow') + node_name: Optional[str] = Field( + None, description='Name of the node involved in the event' + ) + node_type: Optional[str] = Field( + None, description='Type of node (agent, tool, start, end)' + ) + execution_time: Optional[float] = Field( + None, description='Time taken for node execution in seconds' + ) + error: Optional[str] = Field( + None, description='Error message if event represents a failure' + ) + router_choice: Optional[str] = Field( + None, description='Node chosen by router decision' + ) + metadata: Optional[Dict[str, Any]] = Field( + None, description='Additional event-specific data' + ) diff --git a/wavefront/server/modules/agents_module/agents_module/services/agent_crud_service.py b/wavefront/server/modules/agents_module/agents_module/services/agent_crud_service.py new file mode 100644 index 00000000..3324edda --- /dev/null +++ b/wavefront/server/modules/agents_module/agents_module/services/agent_crud_service.py @@ -0,0 +1,461 @@ +import json +import yaml +from typing import List, Optional +from uuid import UUID + +from db_repo_module.cache.cache_manager import 
CacheManager +from db_repo_module.models.agent import Agent +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from flo_cloud.cloud_storage import CloudStorageManager +from flo_cloud.exceptions import CloudStorageFileNotFoundError +from common_module.log.logger import logger +from agents_module.services.namespace_service import NamespaceService +from agents_module.utils.agent_utils import get_agent_yaml_key +from agents_module.utils.cache_utils import ( + get_agent_by_id_cache_key, + get_agent_yaml_cache_key, + get_agents_list_cache_key, +) +from agents_module.utils.validation_utils import validate_agent_workflow_name +from flo_ai import AgentBuilder +from flo_ai.tool.base_tool import Tool + + +class AgentCrudService: + """Service for handling agent CRUD operations with DB + cloud storage""" + + def __init__( + self, + agent_repository: SQLAlchemyRepository[Agent], + namespace_service: NamespaceService, + cloud_storage_manager: CloudStorageManager, + cache_manager: CacheManager, + bucket_name: str, + ): + """ + Initialize the agent CRUD service + + Args: + agent_repository: Agent repository for DB operations + namespace_service: Namespace service for namespace operations + cloud_storage_manager: Cloud storage manager instance + cache_manager: Cache manager instance + bucket_name: Name of the bucket containing agent YAML files + """ + self.agent_repository = agent_repository + self.namespace_service = namespace_service + self.cloud_storage_manager = cloud_storage_manager + self.cache_manager = cache_manager + self.bucket_name = bucket_name + self.cache_ttl = 3600 # 1 hour for agents + + def _validate_yaml_content( + self, + yaml_content: str, + namespace: str, + agent_name: str, + tool_available: List[Tool], + access_token: Optional[str] = None, + app_key: Optional[str] = None, + ) -> None: + """ + Validate YAML content by attempting to build an agent from it + + Args: + yaml_content: The YAML content to validate + namespace: The namespace for logging purposes + agent_name: The agent name for logging purposes + tool_available: List of available tools + + Raises: + ValueError: If YAML is invalid or agent cannot be built + """ + try: + yaml_data = yaml.safe_load(yaml_content) + yaml_tools = yaml_data.get('agent', {}).get('tools', None) + tool_registry = {} + if yaml_tools: + for tool in yaml_tools: + tool_name = tool.get('name', None) + if tool_name: + # Find the corresponding Tool object from tool_available list + for tool_obj in tool_available: + if tool_obj.name == tool_name: + tool_registry[tool_name] = tool_obj + break + + AgentBuilder.from_yaml( + yaml_str=yaml_content, + tool_registry=tool_registry, + access_token=access_token, + app_key=app_key, + ).build() + logger.info( + f'YAML validation successful for namespace: {namespace}, agent: {agent_name}' + ) + except Exception as e: + logger.error( + f'YAML validation failed for namespace: {namespace}, agent: {agent_name}: {str(e)}' + ) + raise ValueError(f'Invalid agent YAML configuration: {str(e)}') + + async def create_agent( + self, + name: str, + namespace: str, + yaml_content: str, + tool_available: List[Tool], + access_token: Optional[str] = None, + app_key: Optional[str] = None, + ) -> dict: + """ + Create a new agent (DB + cloud storage) + + Args: + name: The agent name + namespace: The namespace name (will be created if doesn't exist) + yaml_content: YAML configuration content + tool_available: List of available tools + + Returns: + dict: Created agent details including YAML content + + Raises: 
+ ValueError: If agent already exists or validation fails + """ + logger.info(f'Creating agent - namespace: {namespace}, name: {name}') + + # Validate agent name + validate_agent_workflow_name(name, type='agent') + + # Validate YAML content before proceeding + self._validate_yaml_content( + yaml_content, namespace, name, tool_available, access_token, app_key + ) + + # Get or create namespace first + namespace_dict = await self.namespace_service.get_or_create_namespace(namespace) + + # Check if agent with this name already exists in this namespace + existing_agent = await self.agent_repository.find_one( + name=name, namespace=namespace_dict['name'] + ) + if existing_agent: + logger.warning( + f'Agent already exists with name: {name} in namespace: {namespace_dict["name"]}' + ) + raise ValueError( + f'Agent already exists with name: {name} in namespace: {namespace_dict["name"]}' + ) + + # Create agent record in DB + agent = await self.agent_repository.create( + name=name, namespace=namespace_dict['name'] + ) + + # Upload YAML to cloud storage + yaml_key = get_agent_yaml_key(namespace, name) + yaml_bytes = yaml_content.encode('utf-8') + self.cloud_storage_manager.save_small_file( + file_content=yaml_bytes, bucket_name=self.bucket_name, key=yaml_key + ) + + # Build response with YAML content + agent_dict = agent.to_dict() + agent_dict['yaml_content'] = yaml_content + + # Cache agent metadata + agent_cache_key = get_agent_by_id_cache_key(agent.id) + self.cache_manager.add( + agent_cache_key, json.dumps(agent.to_dict()), expiry=self.cache_ttl + ) + + # Cache YAML content + yaml_cache_key = get_agent_yaml_cache_key(namespace, name) + self.cache_manager.add(yaml_cache_key, yaml_content, expiry=self.cache_ttl) + + # Invalidate list caches + self.cache_manager.remove(get_agents_list_cache_key(None)) + self.cache_manager.remove(get_agents_list_cache_key(namespace)) + + logger.info( + f'Successfully created agent - namespace: {namespace}, name: {name}' + ) + return agent_dict + + async def get_agent(self, agent_id: UUID) -> dict: + """ + Get agent by ID with YAML content + + Args: + agent_id: The agent UUID + + Returns: + dict: Agent details including YAML content + + Raises: + ValueError: If agent not found + """ + # Try cache first + cache_key = get_agent_by_id_cache_key(agent_id) + cached_agent = self.cache_manager.get_str(cache_key) + + if cached_agent: + logger.info(f'Cache hit for agent ID: {agent_id}') + agent_dict = json.loads(cached_agent) + else: + # Fetch from DB + logger.info(f'Fetching agent from DB - ID: {agent_id}') + agent = await self.agent_repository.find_one(id=agent_id) + + if not agent: + raise ValueError(f'Agent not found with ID: {agent_id}') + + agent_dict = agent.to_dict() + + # Cache agent metadata + self.cache_manager.add( + cache_key, json.dumps(agent_dict), expiry=self.cache_ttl + ) + + # Fetch YAML from cloud storage (with caching) + yaml_cache_key = get_agent_yaml_cache_key( + agent_dict['namespace'], agent_dict['name'] + ) + cached_yaml = self.cache_manager.get_str(yaml_cache_key) + + if cached_yaml: + logger.info( + f'Cache hit for agent YAML - namespace: {agent_dict["namespace"]}, name: {agent_dict["name"]}' + ) + yaml_content = cached_yaml + else: + # Fetch YAML from cloud storage + yaml_key = get_agent_yaml_key(agent_dict['namespace'], agent_dict['name']) + logger.info(f'Fetching agent YAML from storage - key: {yaml_key}') + + try: + yaml_bytes = self.cloud_storage_manager.read_file( + self.bucket_name, yaml_key + ) + yaml_content = yaml_bytes.decode('utf-8') + + # 
Cache YAML + self.cache_manager.add( + yaml_cache_key, yaml_content, expiry=self.cache_ttl + ) + except CloudStorageFileNotFoundError: + logger.error( + f'YAML not found in cloud storage for agent ID: {agent_id}' + ) + raise ValueError(f'Agent YAML not found for agent ID: {agent_id}') + + # Add YAML to response + agent_dict['yaml_content'] = yaml_content + + logger.info(f'Successfully retrieved agent - ID: {agent_id}') + return agent_dict + + async def get_agent_yaml_from_bucket(self, agent_name: str, namespace: str) -> str: + """ + Get agent YAML content by name and namespace (for workflow references) + + This method is used by workflow services to fetch agent YAML when they + have namespace/agent_name references + + Args: + agent_name: The agent name + namespace: The namespace name + + Returns: + str: The YAML content as string + + Raises: + ValueError: If agent not found + """ + # Try YAML cache first + yaml_cache_key = get_agent_yaml_cache_key(namespace, agent_name) + cached_yaml = self.cache_manager.get_str(yaml_cache_key) + + if cached_yaml: + logger.info( + f'Cache hit for agent YAML - namespace: {namespace}, name: {agent_name}' + ) + return cached_yaml + + # Fetch YAML from cloud storage + yaml_key = get_agent_yaml_key(namespace, agent_name) + logger.info(f'Fetching agent YAML from storage - key: {yaml_key}') + + try: + yaml_bytes = self.cloud_storage_manager.read_file( + self.bucket_name, yaml_key + ) + yaml_content = yaml_bytes.decode('utf-8') + + # Cache YAML + self.cache_manager.add(yaml_cache_key, yaml_content, expiry=self.cache_ttl) + except CloudStorageFileNotFoundError: + logger.error( + f'YAML not found in cloud storage for agent: {namespace}/{agent_name}' + ) + raise ValueError( + f'Agent YAML not found for agent: {namespace}/{agent_name}' + ) + + logger.info( + f'Successfully retrieved agent YAML - namespace: {namespace}, name: {agent_name}' + ) + return yaml_content + + async def update_agent( + self, + agent_id: UUID, + yaml_content: str, + tool_available: List[Tool], + access_token: Optional[str] = None, + app_key: Optional[str] = None, + ) -> dict: + """ + Update existing agent YAML configuration + + Args: + agent_id: The agent UUID + yaml_content: Updated YAML configuration content + tool_available: List of available tools + + Returns: + dict: Updated agent details + + Raises: + ValueError: If agent not found or validation fails + """ + logger.info(f'Updating agent - ID: {agent_id}') + + # Fetch agent from DB + agent = await self.agent_repository.find_one(id=agent_id) + if not agent: + raise ValueError(f'Agent not found with ID: {agent_id}') + + # Validate YAML content + self._validate_yaml_content( + yaml_content, + agent.namespace, + agent.name, + tool_available, + access_token, + app_key, + ) + + # Update YAML in cloud storage + yaml_key = get_agent_yaml_key(agent.namespace, agent.name) + yaml_bytes = yaml_content.encode('utf-8') + self.cloud_storage_manager.save_small_file( + file_content=yaml_bytes, bucket_name=self.bucket_name, key=yaml_key + ) + + # Update agent timestamp in DB (triggers updated_at) + updated_agent = await self.agent_repository.find_one_and_update( + {'id': agent_id}, refresh=True + ) + + # Invalidate caches + agent_cache_key = get_agent_by_id_cache_key(agent_id) + self.cache_manager.remove(agent_cache_key) + + yaml_cache_key = get_agent_yaml_cache_key(agent.namespace, agent.name) + self.cache_manager.remove(yaml_cache_key) + + # Invalidate list caches + self.cache_manager.remove(get_agents_list_cache_key(None)) + 
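+        # Two list caches may hold stale metadata for this agent: the
+        # unscoped list (namespace=None) and the per-namespace list.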
self.cache_manager.remove(get_agents_list_cache_key(agent.namespace)) + + # Build response + agent_dict = updated_agent.to_dict() + agent_dict['yaml_content'] = yaml_content + + logger.info(f'Successfully updated agent - ID: {agent_id}') + return agent_dict + + async def delete_agent(self, agent_id: UUID) -> bool: + """ + Delete agent (DB + cloud storage) + + Args: + agent_id: The agent UUID + + Returns: + bool: Success status + + Raises: + ValueError: If agent not found + """ + logger.info(f'Deleting agent - ID: {agent_id}') + + # Fetch agent from DB + agent = await self.agent_repository.find_one(id=agent_id) + if not agent: + raise ValueError(f'Agent not found with ID: {agent_id}') + + # Delete from DB + await self.agent_repository.delete_all(id=agent_id) + + # Delete YAML from cloud storage + yaml_key = get_agent_yaml_key(agent.namespace, agent.name) + try: + self.cloud_storage_manager.delete_file(self.bucket_name, yaml_key) + except Exception as e: + logger.error(f'Failed to delete YAML from cloud storage: {str(e)}') + # Continue - DB record is deleted + + # Invalidate caches + agent_cache_key = get_agent_by_id_cache_key(agent_id) + self.cache_manager.remove(agent_cache_key) + + yaml_cache_key = get_agent_yaml_cache_key(agent.namespace, agent.name) + self.cache_manager.remove(yaml_cache_key) + + # Invalidate list caches + self.cache_manager.remove(get_agents_list_cache_key(None)) + self.cache_manager.remove(get_agents_list_cache_key(agent.namespace)) + + logger.info(f'Successfully deleted agent - ID: {agent_id}') + return True + + async def list_agents(self, namespace: Optional[str] = None) -> List[dict]: + """ + List agents from database with optional namespace filtering + + Args: + namespace: Optional namespace to filter agents + + Returns: + List[dict]: List of agents (without YAML content) + """ + # Try cache first + cache_key = get_agents_list_cache_key(namespace) + cached_list = self.cache_manager.get_str(cache_key) + + if cached_list: + logger.info(f'Cache hit for agents list - namespace: {namespace}') + return json.loads(cached_list) + + # Fetch from DB + logger.info(f'Fetching agents list from DB - namespace: {namespace}') + + if namespace: + agents = await self.agent_repository.find(namespace=namespace) + else: + agents = await self.agent_repository.find() + + agents_list = [agent.to_dict() for agent in agents] + + # Cache the result (shorter TTL for lists) + self.cache_manager.add( + cache_key, json.dumps(agents_list), expiry=1800 + ) # 30 min + + logger.info( + f'Successfully retrieved {len(agents_list)} agents - namespace: {namespace}' + ) + return agents_list diff --git a/wavefront/server/modules/agents_module/agents_module/services/agent_inference_service.py b/wavefront/server/modules/agents_module/agents_module/services/agent_inference_service.py new file mode 100644 index 00000000..4f5c56a5 --- /dev/null +++ b/wavefront/server/modules/agents_module/agents_module/services/agent_inference_service.py @@ -0,0 +1,255 @@ +import time +from typing import Any, Dict, List, Optional +from uuid import UUID + +from agents_module.services.agent_crud_service import AgentCrudService +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.models.llm_inference_config import LlmInferenceConfig +from flo_ai import AgentBuilder, Agent, BaseMessage, FloUtils +from flo_ai.llm import OpenAI, Anthropic, Gemini, OllamaLLM, OpenAIVLLM +from common_module.log.logger import logger +from tools_module.registry.tool_loader import ToolLoader +import yaml + + +class 
AgentInferenceService: + """Service for handling agent inference operations""" + + def __init__( + self, + cache_manager: CacheManager, + tool_loader: ToolLoader, + agent_crud_service: AgentCrudService, + ): + """ + Initialize the agent inference service + + Args: + cache_manager: Cache manager instance + tool_loader: Tool loader instance + agent_crud_service: Agent CRUD service for fetching agent YAML + """ + self.cache_manager = cache_manager + self.tool_loader = tool_loader + self.agent_crud_service = agent_crud_service + + async def create_agent_from_yaml( + self, + yaml_content: str, + agent_name: str, + llm_config: Optional[LlmInferenceConfig] = None, + access_token: Optional[str] = None, + app_key: Optional[str] = None, + ): + """ + Create agent instance from YAML configuration + + Args: + yaml_content: YAML configuration content + agent_name: The name of the agent for logging purposes + llm_config: Optional LLM configuration to override agent's default LLM + + Returns: + Agent instance created from YAML + """ + logger.info(f'Creating agent from YAML for agent: {agent_name}') + + # Add tools if provided in the yaml file + yaml_data = yaml.safe_load(yaml_content) + tool_names = yaml_data.get('agent', {}).get('tools', []) + tool_register = {} + if tool_names: + logger.info(f'Loading tools for agent {agent_name}: {tool_names}') + for tool in tool_names: + tools = self.tool_loader.load_tool_with_name(tool.get('name')) + tool_register[tool.get('name')] = tools + else: + logger.warning(f'No tools were loaded for agent {agent_name}') + + agent_builder = AgentBuilder.from_yaml( + yaml_str=yaml_content, + tool_registry=tool_register, + access_token=access_token, + app_key=app_key, + ) + + # Override LLM if config is provided + if llm_config: + logger.info( + f'Overriding LLM with config: {llm_config.display_name} (type: {llm_config.type})' + ) + llm_instance = self._create_llm_instance(llm_config) + agent_builder = agent_builder.with_llm(llm_instance) + + agent = agent_builder.build() + logger.info(f'Successfully created agent for agent: {agent_name}') + return agent + + def _create_llm_instance(self, config: LlmInferenceConfig): + """ + Create LLM instance based on configuration + + Args: + config: LLM inference configuration + + Returns: + LLM instance + """ + if config.type == 'openai': + return OpenAI(model=config.llm_model, api_key=config.api_key) + elif config.type == 'azure_openai': + return OpenAI( + model=config.llm_model, api_key=config.api_key, base_url=config.base_url + ) + elif config.type == 'anthropic': + return Anthropic(model=config.llm_model, api_key=config.api_key) + elif config.type == 'gemini': + return Gemini(model=config.llm_model, api_key=config.api_key) + elif config.type == 'ollama': + return OllamaLLM(model=config.llm_model, base_url=config.base_url) + elif config.type == 'vllm': + return OpenAIVLLM(model=config.llm_model, base_url=config.base_url) + else: + raise ValueError(f'Unsupported LLM type: {config.type}') + + async def run_agent_inference( + self, + agent: Agent, + inputs: List[BaseMessage] | str, + variables: Dict[str, Any], + agent_name: str, + output_json_enabled: bool = True, + ) -> tuple[str, float]: + """ + Run agent inference with provided variables + + Args: + agent: Agent instance + inputs: Inputs to use for inference + variables: Variables to pass to the agent + agent_name: The name of the agent for logging purposes + output_json_enabled: Whether to extract JSON from the response + + Returns: + tuple: (result, execution_time) + """ + logger.info( 
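+            # Only the variable names are logged; the values (which may carry
+            # prompt or user data) stay out of the logs: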
+ f'Running inference for agent {agent_name} with variables: {list(variables.keys())}' + ) + start_time = time.time() + + # Use a generic prompt that allows the agent to use the variables + result_str = await agent.run(inputs, variables=variables) + + # Conditionally extract JSON based on output_json_enabled flag + if output_json_enabled: + result = FloUtils.extract_jsons_from_string(result_str) + else: + result = result_str + + execution_time = time.time() - start_time + logger.info( + f'Successfully completed inference for agent {agent_name} in {execution_time:.2f} seconds' + ) + + return result, execution_time + + async def perform_inference( + self, + agent_id: str, + namespace: str, + variables: Dict[str, Any], + inputs: List[BaseMessage] | str, + llm_config: Optional[LlmInferenceConfig] = None, + output_json_enabled: bool = True, + access_token: Optional[str] = None, + app_key: Optional[str] = None, + ) -> tuple[str, float]: + """ + Complete inference workflow: fetch YAML, create agent, run inference + + Args: + agent_id: The ID of the agent + namespace: The namespace of the agent + variables: Variables to pass to the agent + inputs: Inputs to use for inference + llm_config: Optional LLM configuration to override agent's default LLM + output_json_enabled: Whether to extract JSON from the response + + Returns: + tuple: (result, execution_time) + """ + + # Fetch agent YAML using CRUD service + yaml_content = await self.agent_crud_service.get_agent_yaml_from_bucket( + agent_id, namespace + ) + + # Create agent from YAML with optional LLM override and tools + agent = await self.create_agent_from_yaml( + yaml_content, agent_id, llm_config, access_token, app_key + ) + + # Run inference + result, execution_time = await self.run_agent_inference( + agent, inputs, variables, agent_id, output_json_enabled + ) + + return result, execution_time + + async def perform_inference_v2( + self, + agent_id: UUID, + variables: Dict[str, Any], + inputs: List[BaseMessage] | str, + llm_config: Optional[LlmInferenceConfig] = None, + output_json_enabled: bool = True, + access_token: Optional[str] = None, + app_key: Optional[str] = None, + ) -> tuple[str, float, str]: + """ + Complete inference workflow (v2): fetch agent from DB + cloud storage, run inference + + Args: + agent_id: The UUID of the agent + variables: Variables to pass to the agent + inputs: Inputs to use for inference + llm_config: Optional LLM configuration to override agent's default LLM + output_json_enabled: Whether to extract JSON from the response + + Returns: + tuple: (result, execution_time, namespace) + + Raises: + ValueError: If agent_crud_service is not initialized or agent not found + """ + if not self.agent_crud_service: + raise ValueError( + 'agent_crud_service not initialized. Required for v2 inference.' 
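+            # This guard looks defensive: __init__ takes agent_crud_service
+            # as a required argument, so it should only fire if the service
+            # was wired up incorrectly.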
+ ) + + logger.info(f'Starting v2 inference for agent_id: {agent_id}') + + # Fetch agent from DB + cloud storage (includes YAML content) + agent_data = await self.agent_crud_service.get_agent(agent_id) + + # Extract details + namespace = agent_data['namespace'] + name = agent_data['name'] + yaml_content = agent_data['yaml_content'] + + logger.info( + f'Retrieved agent - namespace: {namespace}, name: {name}, agent_id: {agent_id}' + ) + + # Create agent from YAML with optional LLM override and tools + agent = await self.create_agent_from_yaml( + yaml_content, name, llm_config, access_token, app_key + ) + + # Run inference + result, execution_time = await self.run_agent_inference( + agent, inputs, variables, name, output_json_enabled + ) + + return result, execution_time, namespace diff --git a/wavefront/server/modules/agents_module/agents_module/services/namespace_service.py b/wavefront/server/modules/agents_module/agents_module/services/namespace_service.py new file mode 100644 index 00000000..c1b097c8 --- /dev/null +++ b/wavefront/server/modules/agents_module/agents_module/services/namespace_service.py @@ -0,0 +1,176 @@ +from typing import List +import json + +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.models.namespace import Namespace +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from common_module.log.logger import logger +from agents_module.utils.cache_utils import ( + get_namespace_cache_key, + get_namespaces_list_cache_key, +) + + +class NamespaceService: + """Service for handling namespace operations with caching""" + + def __init__( + self, + namespace_repository: SQLAlchemyRepository[Namespace], + cache_manager: CacheManager, + ): + self.namespace_repository = namespace_repository + self.cache_manager = cache_manager + self.cache_ttl = 7200 # 2 hours for namespaces (they change less frequently) + + async def get_namespace(self, name: str) -> dict: + """ + Get namespace by name with caching + + Args: + name: The namespace name + + Returns: + dict: Namespace details + + Raises: + ValueError: If namespace not found + """ + cache_key = get_namespace_cache_key(name) + + # Try cache first + cached_namespace = self.cache_manager.get_str(cache_key) + if cached_namespace: + logger.info(f'Cache hit for namespace: {name}') + return json.loads(cached_namespace) + + # Fetch from DB + logger.info(f'Fetching namespace from DB: {name}') + namespace = await self.namespace_repository.find_one(name=name) + + if not namespace: + raise ValueError(f'Namespace not found: {name}') + + namespace_dict = namespace.to_dict() + + # Cache the result + self.cache_manager.add( + cache_key, json.dumps(namespace_dict), expiry=self.cache_ttl + ) + + return namespace_dict + + async def create_namespace(self, name: str) -> dict: + """ + Create a new namespace + + Args: + name: The namespace name + + Returns: + dict: Created namespace details + + Raises: + ValueError: If namespace already exists + """ + # Check if namespace already exists + existing = await self.namespace_repository.find_one(name=name) + if existing: + logger.warning(f'Namespace already exists: {name}') + raise ValueError(f'Namespace already exists: {name}') + + # Create namespace + logger.info(f'Creating namespace: {name}') + namespace = Namespace(name=name) + created_namespace = await self.namespace_repository.save(namespace) + + namespace_dict = created_namespace.to_dict() + + # Cache the new namespace + cache_key = get_namespace_cache_key(name) + self.cache_manager.add( + 
cache_key, json.dumps(namespace_dict), expiry=self.cache_ttl + ) + + # Invalidate list cache + list_cache_key = get_namespaces_list_cache_key() + self.cache_manager.remove(list_cache_key) + + logger.info(f'Successfully created namespace: {name}') + return namespace_dict + + async def get_or_create_namespace(self, name: str) -> dict: + """ + Get namespace if exists, otherwise create it (single DB call) + + Args: + name: The namespace name + + Returns: + dict: Namespace details + """ + cache_key = get_namespace_cache_key(name) + + # Try cache first + cached_namespace = self.cache_manager.get_str(cache_key) + if cached_namespace: + logger.info(f'Cache hit for namespace: {name}') + return json.loads(cached_namespace) + + # Check DB + namespace = await self.namespace_repository.find_one(name=name) + + if namespace: + # Namespace exists + namespace_dict = namespace.to_dict() + self.cache_manager.add( + cache_key, json.dumps(namespace_dict), expiry=self.cache_ttl + ) + return namespace_dict + else: + # Create new namespace + logger.info(f'Creating namespace: {name}') + created_namespace = await self.namespace_repository.create(name=name) + + namespace_dict = created_namespace.to_dict() + + # Cache the new namespace + self.cache_manager.add( + cache_key, json.dumps(namespace_dict), expiry=self.cache_ttl + ) + + # Invalidate list cache + list_cache_key = get_namespaces_list_cache_key() + self.cache_manager.remove(list_cache_key) + + logger.info(f'Successfully created namespace: {name}') + return namespace_dict + + async def list_namespaces(self) -> List[dict]: + """ + List all namespaces with caching + + Returns: + List[dict]: List of all namespaces + """ + cache_key = get_namespaces_list_cache_key() + + # Try cache first + cached_list = self.cache_manager.get_str(cache_key) + if cached_list: + logger.info('Cache hit for namespaces list') + return json.loads(cached_list) + + # Fetch from DB + logger.info('Fetching namespaces list from DB') + namespaces = await self.namespace_repository.find() + + namespaces_list = [ns.to_dict() for ns in namespaces] + + # Cache the result + self.cache_manager.add( + cache_key, json.dumps(namespaces_list), expiry=self.cache_ttl + ) + + logger.info(f'Successfully retrieved {len(namespaces_list)} namespaces') + return namespaces_list diff --git a/wavefront/server/modules/agents_module/agents_module/services/workflow_crud_service.py b/wavefront/server/modules/agents_module/agents_module/services/workflow_crud_service.py new file mode 100644 index 00000000..e09087c0 --- /dev/null +++ b/wavefront/server/modules/agents_module/agents_module/services/workflow_crud_service.py @@ -0,0 +1,516 @@ +import json +import yaml +from typing import Dict, List, Optional +from uuid import UUID + +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.models.workflow import Workflow +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from flo_cloud.cloud_storage import CloudStorageManager +from flo_cloud.exceptions import CloudStorageFileNotFoundError +from common_module.log.logger import logger +from agents_module.services.namespace_service import NamespaceService +from agents_module.utils.workflow_utils import get_workflow_yaml_key +from agents_module.utils.cache_utils import ( + get_workflow_by_id_cache_key, + get_workflow_yaml_cache_key, + get_workflows_list_cache_key, +) +from agents_module.utils.validation_utils import validate_agent_workflow_name +from flo_ai import AriumBuilder, AgentBuilder, Agent +from 
agents_module.services.agent_crud_service import AgentCrudService +from tools_module.registry.tool_loader import ToolLoader +from tools_module.registry.function_node_registry import FUNCTION_NODE_REGISTRY + + +class WorkflowCrudService: + """Service for handling workflow CRUD operations with DB + cloud storage""" + + def __init__( + self, + workflow_repository: SQLAlchemyRepository[Workflow], + namespace_service: NamespaceService, + cloud_storage_manager: CloudStorageManager, + cache_manager: CacheManager, + bucket_name: str, + agent_crud_service: AgentCrudService, + tool_loader: ToolLoader, + ): + """ + Initialize the workflow CRUD service + + Args: + workflow_repository: Workflow repository for DB operations + namespace_service: Namespace service for namespace operations + cloud_storage_manager: Cloud storage manager instance + cache_manager: Cache manager instance + bucket_name: Name of the bucket containing workflow YAML files + agent_crud_service: Agent CRUD service for fetching agent YAMLs + tool_loader: Tool loader for loading agent tools + """ + self.workflow_repository = workflow_repository + self.namespace_service = namespace_service + self.cloud_storage_manager = cloud_storage_manager + self.cache_manager = cache_manager + self.bucket_name = bucket_name + self.agent_crud_service = agent_crud_service + self.tool_loader = tool_loader + self.cache_ttl = 3600 # 1 hour for workflows + + def _extract_agent_references(self, yaml_content: str) -> List[str]: + """ + Extract agent references (namespace/agent_name) from workflow YAML + + Args: + yaml_content: YAML configuration content + + Returns: + List of agent references in format 'namespace/agent_name' + """ + try: + yaml_data = yaml.safe_load(yaml_content) + arium_config = yaml_data.get('arium', {}) + agents_config = arium_config.get('agents', []) + + agent_references = [] + for agent_def in agents_config: + agent_name = agent_def.get('name', '') + # If agent name contains '/', it's a reference to cloud storage + if '/' in agent_name: + agent_references.append(agent_name) + logger.info(f'Found agent reference: {agent_name}') + + return agent_references + except Exception as e: + logger.error(f'Error extracting agent references from YAML: {str(e)}') + return [] + + async def _build_referenced_agents( + self, + agent_references: List[str], + access_token: Optional[str] = None, + app_key: Optional[str] = None, + ) -> Dict[str, Agent]: + """ + Fetch and build agent instances for referenced agents + + Args: + agent_references: List of agent references in format 'namespace/agent_name' + + Returns: + Dictionary mapping agent reference to built Agent instance + """ + agents_dict = {} + + for agent_ref in agent_references: + try: + # Split namespace/agent_name + if '/' not in agent_ref: + logger.warning( + f'Invalid agent reference format: {agent_ref}, expected namespace/agent_name' + ) + continue + + parts = agent_ref.split('/', 1) + namespace = parts[0] + agent_name = parts[1] + + logger.info( + f'Fetching and building agent for validation: namespace={namespace}, agent_name={agent_name}' + ) + + # Use AgentCrudService to fetch agent YAML (handles caching automatically) + agent_yaml_content = ( + await self.agent_crud_service.get_agent_yaml_from_bucket( + agent_name, namespace + ) + ) + + # Parse YAML to get tools + yaml_data = yaml.safe_load(agent_yaml_content) + tool_names = yaml_data.get('agent', {}).get('tools', []) + tool_registry = {} + + if tool_names: + logger.info(f'Loading tools for agent {agent_ref}: {tool_names}') + for tool in 
tool_names: + tool_name = tool.get('name') + if tool_name: + tools = self.tool_loader.load_tool_with_name(tool_name) + tool_registry[tool_name] = tools + + # Build agent + agent = AgentBuilder.from_yaml( + yaml_str=agent_yaml_content, + tool_registry=tool_registry, + access_token=access_token, + app_key=app_key, + ).build() + + agents_dict[agent_ref] = agent + logger.info(f'Successfully built agent for validation: {agent_ref}') + + except Exception as e: + logger.error(f'Error building referenced agent {agent_ref}: {str(e)}') + raise ValueError( + f'Failed to build referenced agent {agent_ref}: {str(e)}' + ) + + return agents_dict + + async def _validate_yaml_content( + self, + yaml_content: str, + namespace: str, + workflow_name: str, + access_token: Optional[str] = None, + app_key: Optional[str] = None, + ) -> None: + """ + Validate YAML content by attempting to build a workflow from it + + Args: + yaml_content: The YAML content to validate + namespace: The namespace for logging purposes + workflow_name: The workflow name for logging purposes + + Raises: + ValueError: If YAML is invalid or workflow cannot be built + """ + try: + # Extract and build referenced agents + agent_references = self._extract_agent_references(yaml_content) + agents_dict = {} + + if agent_references: + logger.info( + f'Building {len(agent_references)} referenced agents for validation' + ) + agents_dict = await self._build_referenced_agents( + agent_references, access_token, app_key + ) + + # Validate workflow with pre-built agents + arium_instance = AriumBuilder.from_yaml( + yaml_str=yaml_content, + agents=agents_dict, + function_registry=FUNCTION_NODE_REGISTRY, + access_token=access_token, + app_key=app_key, + ).build() + + # compile to verify whether the graph is correct + arium_instance.compile() + + logger.info( + f'YAML validation successful for namespace: {namespace}, workflow: {workflow_name}' + ) + except Exception as e: + logger.error( + f'YAML validation failed for namespace: {namespace}, workflow: {workflow_name}: {str(e)}' + ) + raise ValueError(f'Invalid workflow YAML configuration: {str(e)}') + + async def create_workflow( + self, + name: str, + namespace: str, + yaml_content: str, + access_token: Optional[str] = None, + app_key: Optional[str] = None, + ) -> dict: + """ + Create a new workflow (DB + cloud storage) + + Args: + name: The workflow name + namespace: The namespace name (will be created if doesn't exist) + yaml_content: YAML configuration content + + Returns: + dict: Created workflow details including YAML content + + Raises: + ValueError: If workflow already exists or validation fails + """ + logger.info(f'Creating workflow - namespace: {namespace}, name: {name}') + + # Validate workflow name + validate_agent_workflow_name(name, type='workflow') + + # Validate YAML content before proceeding + await self._validate_yaml_content( + yaml_content, namespace, name, access_token, app_key + ) + + # Get or create namespace first + namespace_dict = await self.namespace_service.get_or_create_namespace(namespace) + + # Check if workflow with this name already exists in this namespace + existing_workflow = await self.workflow_repository.find_one( + name=name, namespace=namespace_dict['name'] + ) + if existing_workflow: + logger.warning( + f'Workflow already exists with name: {name} in namespace: {namespace_dict["name"]}' + ) + raise ValueError( + f'Workflow already exists with name: {name} in namespace: {namespace_dict["name"]}' + ) + + # Create workflow record in DB + workflow = await 
self.workflow_repository.create( + name=name, namespace=namespace_dict['name'] + ) + + # Upload YAML to cloud storage + yaml_key = get_workflow_yaml_key(namespace, name) + yaml_bytes = yaml_content.encode('utf-8') + self.cloud_storage_manager.save_small_file( + file_content=yaml_bytes, bucket_name=self.bucket_name, key=yaml_key + ) + + # Build response with YAML content + workflow_dict = workflow.to_dict() + workflow_dict['yaml_content'] = yaml_content + + # Cache workflow metadata + workflow_cache_key = get_workflow_by_id_cache_key(workflow.id) + self.cache_manager.add( + workflow_cache_key, json.dumps(workflow.to_dict()), expiry=self.cache_ttl + ) + + # Cache YAML content + yaml_cache_key = get_workflow_yaml_cache_key(namespace, name) + self.cache_manager.add(yaml_cache_key, yaml_content, expiry=self.cache_ttl) + + # Invalidate list caches + self.cache_manager.remove(get_workflows_list_cache_key(None)) + self.cache_manager.remove(get_workflows_list_cache_key(namespace)) + + logger.info( + f'Successfully created workflow - namespace: {namespace}, name: {name}' + ) + return workflow_dict + + async def get_workflow(self, workflow_id: UUID) -> dict: + """ + Get workflow by ID with YAML content + + Args: + workflow_id: The workflow UUID + + Returns: + dict: Workflow details including YAML content + + Raises: + ValueError: If workflow not found + """ + # Try cache first + cache_key = get_workflow_by_id_cache_key(workflow_id) + cached_workflow = self.cache_manager.get_str(cache_key) + + if cached_workflow: + logger.info(f'Cache hit for workflow ID: {workflow_id}') + workflow_dict = json.loads(cached_workflow) + else: + # Fetch from DB + logger.info(f'Fetching workflow from DB - ID: {workflow_id}') + workflow = await self.workflow_repository.find_one(id=workflow_id) + + if not workflow: + raise ValueError(f'Workflow not found with ID: {workflow_id}') + + workflow_dict = workflow.to_dict() + + # Cache workflow metadata + self.cache_manager.add( + cache_key, json.dumps(workflow_dict), expiry=self.cache_ttl + ) + + # Fetch YAML from cloud storage (with caching) + yaml_cache_key = get_workflow_yaml_cache_key( + workflow_dict['namespace'], workflow_dict['name'] + ) + cached_yaml = self.cache_manager.get_str(yaml_cache_key) + + if cached_yaml: + logger.info( + f'Cache hit for workflow YAML - namespace: {workflow_dict["namespace"]}, name: {workflow_dict["name"]}' + ) + yaml_content = cached_yaml + else: + # Fetch YAML from cloud storage + yaml_key = get_workflow_yaml_key( + workflow_dict['namespace'], workflow_dict['name'] + ) + logger.info(f'Fetching workflow YAML from storage - key: {yaml_key}') + + try: + yaml_bytes = self.cloud_storage_manager.read_file( + self.bucket_name, yaml_key + ) + yaml_content = yaml_bytes.decode('utf-8') + + # Cache YAML + self.cache_manager.add( + yaml_cache_key, yaml_content, expiry=self.cache_ttl + ) + except CloudStorageFileNotFoundError: + logger.error( + f'YAML not found in cloud storage for workflow ID: {workflow_id}' + ) + raise ValueError( + f'Workflow YAML not found for workflow ID: {workflow_id}' + ) + + # Add YAML to response + workflow_dict['yaml_content'] = yaml_content + + logger.info(f'Successfully retrieved workflow - ID: {workflow_id}') + return workflow_dict + + async def update_workflow( + self, + workflow_id: UUID, + yaml_content: str, + access_token: Optional[str] = None, + app_key: Optional[str] = None, + ) -> dict: + """ + Update existing workflow YAML configuration + + Args: + workflow_id: The workflow UUID + yaml_content: Updated YAML 
configuration content + + Returns: + dict: Updated workflow details + + Raises: + ValueError: If workflow not found or validation fails + """ + logger.info(f'Updating workflow - ID: {workflow_id}') + + # Fetch workflow from DB + workflow = await self.workflow_repository.find_one(id=workflow_id) + if not workflow: + raise ValueError(f'Workflow not found with ID: {workflow_id}') + + # Validate YAML content + await self._validate_yaml_content( + yaml_content, workflow.namespace, workflow.name, access_token, app_key + ) + + # Update YAML in cloud storage + yaml_key = get_workflow_yaml_key(workflow.namespace, workflow.name) + yaml_bytes = yaml_content.encode('utf-8') + self.cloud_storage_manager.save_small_file( + file_content=yaml_bytes, bucket_name=self.bucket_name, key=yaml_key + ) + + # Update workflow timestamp in DB (triggers updated_at) + updated_workflow = await self.workflow_repository.find_one_and_update( + {'id': workflow_id}, refresh=True + ) + + # Invalidate caches + workflow_cache_key = get_workflow_by_id_cache_key(workflow_id) + self.cache_manager.remove(workflow_cache_key) + + yaml_cache_key = get_workflow_yaml_cache_key(workflow.namespace, workflow.name) + self.cache_manager.remove(yaml_cache_key) + + # Invalidate list caches + self.cache_manager.remove(get_workflows_list_cache_key(None)) + self.cache_manager.remove(get_workflows_list_cache_key(workflow.namespace)) + + # Build response + workflow_dict = updated_workflow.to_dict() + workflow_dict['yaml_content'] = yaml_content + + logger.info(f'Successfully updated workflow - ID: {workflow_id}') + return workflow_dict + + async def delete_workflow(self, workflow_id: UUID) -> bool: + """ + Delete workflow (DB + cloud storage) + + Args: + workflow_id: The workflow UUID + + Returns: + bool: Success status + + Raises: + ValueError: If workflow not found + """ + logger.info(f'Deleting workflow - ID: {workflow_id}') + + # Fetch workflow from DB + workflow = await self.workflow_repository.find_one(id=workflow_id) + if not workflow: + raise ValueError(f'Workflow not found with ID: {workflow_id}') + + # Delete from DB + await self.workflow_repository.delete_all(id=workflow_id) + + # Delete YAML from cloud storage + yaml_key = get_workflow_yaml_key(workflow.namespace, workflow.name) + try: + self.cloud_storage_manager.delete_file(self.bucket_name, yaml_key) + except Exception as e: + logger.error(f'Failed to delete YAML from cloud storage: {str(e)}') + # Continue - DB record is deleted + + # Invalidate caches + workflow_cache_key = get_workflow_by_id_cache_key(workflow_id) + self.cache_manager.remove(workflow_cache_key) + + yaml_cache_key = get_workflow_yaml_cache_key(workflow.namespace, workflow.name) + self.cache_manager.remove(yaml_cache_key) + + # Invalidate list caches + self.cache_manager.remove(get_workflows_list_cache_key(None)) + self.cache_manager.remove(get_workflows_list_cache_key(workflow.namespace)) + + logger.info(f'Successfully deleted workflow - ID: {workflow_id}') + return True + + async def list_workflows(self, namespace: Optional[str] = None) -> List[dict]: + """ + List workflows from database with optional namespace filtering + + Args: + namespace: Optional namespace to filter workflows + + Returns: + List[dict]: List of workflows (without YAML content) + """ + # Try cache first + cache_key = get_workflows_list_cache_key(namespace) + cached_list = self.cache_manager.get_str(cache_key) + + if cached_list: + logger.info(f'Cache hit for workflows list - namespace: {namespace}') + return json.loads(cached_list) + + # 
Fetch from DB + logger.info(f'Fetching workflows list from DB - namespace: {namespace}') + + if namespace: + workflows = await self.workflow_repository.find(namespace=namespace) + else: + workflows = await self.workflow_repository.find() + + workflows_list = [workflow.to_dict() for workflow in workflows] + + # Cache the result (shorter TTL for lists) + self.cache_manager.add( + cache_key, json.dumps(workflows_list), expiry=1800 + ) # 30 min + + logger.info( + f'Successfully retrieved {len(workflows_list)} workflows - namespace: {namespace}' + ) + return workflows_list diff --git a/wavefront/server/modules/agents_module/agents_module/services/workflow_events.py b/wavefront/server/modules/agents_module/agents_module/services/workflow_events.py new file mode 100644 index 00000000..ad185e42 --- /dev/null +++ b/wavefront/server/modules/agents_module/agents_module/services/workflow_events.py @@ -0,0 +1,136 @@ +import asyncio +from typing import Dict, Callable, List +from flo_ai.arium import AriumEventType, AriumEvent +from common_module.log.logger import logger +from agents_module.models.workflow_schemas import WorkflowEventMessage + + +class WorkflowEventStreamer: + """Manager for HTTP streaming workflow events with user isolation using asyncio.Queue""" + + def __init__(self): + # Store event queues by user-specific workflow key (user_id_namespace_workflow_id) + self.event_queues: Dict[str, asyncio.Queue] = {} + + def get_workflow_key(self, user_id: str, namespace: str, workflow_id: str) -> str: + """Generate unique key for user-specific workflow""" + return f'{user_id}_{namespace}_{workflow_id}' + + def get_or_create_queue( + self, user_id: str, namespace: str, workflow_id: str + ) -> asyncio.Queue: + """Get or create event queue for user-specific workflow""" + workflow_key = self.get_workflow_key(user_id, namespace, workflow_id) + + if workflow_key not in self.event_queues: + self.event_queues[workflow_key] = asyncio.Queue() + logger.info( + f'Created event queue for user {user_id}, workflow {namespace}/{workflow_id}' + ) + + return self.event_queues[workflow_key] + + async def add_event( + self, + user_id: str, + namespace: str, + workflow_id: str, + event_message: WorkflowEventMessage, + ): + """Add event to the queue for user-specific workflow""" + workflow_key = self.get_workflow_key(user_id, namespace, workflow_id) + + if workflow_key not in self.event_queues: + # Create queue if it doesn't exist (workflow started before streaming) + self.event_queues[workflow_key] = asyncio.Queue() + logger.info( + f'Created event queue for user {user_id}, workflow {namespace}/{workflow_id}' + ) + + try: + # Convert event message to dict for JSON serialization + event_dict = event_message.model_dump() + await self.event_queues[workflow_key].put(event_dict) + logger.debug( + f"Event queued for user {user_id}, workflow {namespace}/{workflow_id}: {event_dict['event_type']}" + ) + except Exception as e: + logger.error( + f'Error queuing event for user {user_id}, workflow {namespace}/{workflow_id}: {e}' + ) + + def cleanup_queue(self, user_id: str, namespace: str, workflow_id: str): + """Remove event queue for user-specific workflow""" + workflow_key = self.get_workflow_key(user_id, namespace, workflow_id) + + if workflow_key in self.event_queues: + del self.event_queues[workflow_key] + logger.info( + f'Cleaned up event queue for user {user_id}, workflow {namespace}/{workflow_id}' + ) + + +# Global event streamer instance +event_streamer = WorkflowEventStreamer() + + +# Hardcoded events filter - listen to 
all event types +DEFAULT_EVENTS_FILTER: List[AriumEventType] = [ + AriumEventType.WORKFLOW_STARTED, + AriumEventType.WORKFLOW_COMPLETED, + AriumEventType.WORKFLOW_FAILED, + AriumEventType.NODE_STARTED, + AriumEventType.NODE_COMPLETED, + AriumEventType.NODE_FAILED, + AriumEventType.ROUTER_DECISION, + AriumEventType.EDGE_TRAVERSED, +] + + +def create_workflow_event_callback( + user_id: str, namespace: str, workflow_id: str +) -> Callable[[AriumEvent], None]: + """ + Create a hardcoded event callback function for user-specific HTTP streaming + + Args: + user_id: User ID from authenticated session + namespace: Workflow namespace + workflow_id: Workflow ID + + Returns: + Event callback function that queues events for HTTP streaming + """ + + def event_callback(event: AriumEvent) -> None: + """ + Hardcoded callback that converts AriumEvent to WorkflowEventMessage and queues for HTTP streaming + """ + try: + # Convert AriumEvent to WorkflowEventMessage + event_message = WorkflowEventMessage( + event_type=event.event_type.value, + timestamp=event.timestamp, + workflow_id=workflow_id, + namespace=namespace, + node_name=event.node_name, + node_type=event.node_type, + execution_time=event.execution_time, + error=event.error, + router_choice=event.router_choice, + metadata=event.metadata, + ) + + # Queue event for HTTP streaming (async operation, we'll queue it) + asyncio.create_task( + event_streamer.add_event(user_id, namespace, workflow_id, event_message) + ) + + logger.debug( + f'Workflow event queued: {event.event_type.value} for user {user_id}, workflow {namespace}/{workflow_id}' + ) + + except Exception as e: + logger.error(f'Error in workflow event callback for user {user_id}: {e}') + + return event_callback diff --git a/wavefront/server/modules/agents_module/agents_module/services/workflow_inference_service.py b/wavefront/server/modules/agents_module/agents_module/services/workflow_inference_service.py new file mode 100644 index 00000000..8762e415 --- /dev/null +++ b/wavefront/server/modules/agents_module/agents_module/services/workflow_inference_service.py @@ -0,0 +1,388 @@ +import time +from typing import Any, Dict, List, Optional, Callable +import yaml + +from db_repo_module.cache.cache_manager import CacheManager +from flo_ai import AriumBuilder, BaseMessage, FloUtils, Arium, AgentBuilder, Agent +from flo_cloud.cloud_storage import CloudStorageManager +from common_module.log.logger import logger +from agents_module.utils.workflow_utils import get_workflow_yaml_key +from agents_module.utils.cache_utils import get_workflow_yaml_cache_key +from flo_ai.arium import AriumEventType, AriumEvent, MessageMemoryItem +from agents_module.services.agent_crud_service import AgentCrudService +from tools_module.registry.tool_loader import ToolLoader +from tools_module.registry.function_node_registry import FUNCTION_NODE_REGISTRY + + +class WorkflowInferenceService: + """Service for handling workflow inference operations""" + + def __init__( + self, + cloud_storage_manager: CloudStorageManager, + cache_manager: CacheManager, + bucket_name: str, + agent_crud_service: Optional[AgentCrudService] = None, + tool_loader: Optional[ToolLoader] = None, + ): + """ + Initialize the workflow inference service + + Args: + cloud_storage_manager: Cloud storage manager instance + cache_manager: Cache manager instance + bucket_name: Name of the bucket containing workflow YAML files + agent_crud_service: Agent CRUD service for fetching agent YAMLs + tool_loader: Tool loader for loading agent tools + """ + 
self.cloud_storage_manager = cloud_storage_manager + self.bucket_name = bucket_name + self.cache_manager = cache_manager + self.agent_crud_service = agent_crud_service + self.tool_loader = tool_loader + + async def fetch_workflow_yaml(self, workflow_name: str, namespace: str) -> str: + """ + Fetch workflow YAML configuration from cloud storage + + Args: + workflow_name: The name of the workflow + namespace: The namespace of the workflow + + Returns: + str: YAML content as string + """ + yaml_key = get_workflow_yaml_key(namespace, workflow_name) + cache_key = get_workflow_yaml_cache_key(namespace, workflow_name) + + # Try to get from cache first + cached_result = self.cache_manager.get_str(cache_key) + if cached_result: + logger.info( + f'Cache hit fetching workflow YAML for namespace: {namespace}, workflow: {workflow_name}' + ) + return cached_result + + logger.info( + f'Fetching workflow YAML for namespace: {namespace}, workflow: {workflow_name}' + ) + yaml_bytes: bytes = self.cloud_storage_manager.read_file( + self.bucket_name, yaml_key + ) + yaml_content = yaml_bytes.decode('utf-8') + + self.cache_manager.add(cache_key, yaml_content, expiry=3600) + + logger.info( + f'Successfully fetched workflow YAML for namespace: {namespace}, workflow: {workflow_name}' + ) + return yaml_content + + def _extract_agent_references(self, yaml_content: str) -> List[str]: + """ + Extract agent references (namespace/agent_name) from workflow YAML + + Args: + yaml_content: YAML configuration content + + Returns: + List of agent references in format 'namespace/agent_name' + """ + try: + yaml_data = yaml.safe_load(yaml_content) + arium_config = yaml_data.get('arium', {}) + agents_config = arium_config.get('agents', []) + + agent_references = [] + for agent_def in agents_config: + agent_name = agent_def.get('name', '') + # If agent name contains '/', it's a reference to cloud storage + if '/' in agent_name: + agent_references.append(agent_name) + logger.info(f'Found agent reference: {agent_name}') + + return agent_references + except Exception as e: + logger.error(f'Error extracting agent references from YAML: {str(e)}') + return [] + + async def _build_referenced_agents( + self, + agent_references: List[str], + access_token: Optional[str] = None, + app_key: Optional[str] = None, + ) -> Dict[str, Agent]: + """ + Fetch and build agent instances for referenced agents + + Args: + agent_references: List of agent references in format 'namespace/agent_name' + + Returns: + Dictionary mapping agent reference to built Agent instance + """ + agents_dict = {} + + for agent_ref in agent_references: + try: + # Split namespace/agent_name + if '/' not in agent_ref: + logger.warning( + f'Invalid agent reference format: {agent_ref}, expected namespace/agent_name' + ) + continue + + parts = agent_ref.split('/', 1) + namespace = parts[0] + agent_name = parts[1] + + logger.info( + f'Fetching and building agent: namespace={namespace}, agent_name={agent_name}' + ) + + # Use AgentCrudService to fetch agent YAML (handles caching automatically) + agent_yaml_content = ( + await self.agent_crud_service.get_agent_yaml_from_bucket( + agent_name, namespace + ) + ) + + # Parse YAML to get tools + yaml_data = yaml.safe_load(agent_yaml_content) + tool_names = yaml_data.get('agent', {}).get('tools', []) + tool_registry = {} + + if tool_names: + logger.info(f'Loading tools for agent {agent_ref}: {tool_names}') + for tool in tool_names: + tool_name = tool.get('name') + if tool_name: + tools = self.tool_loader.load_tool_with_name(tool_name) + 
tool_registry[tool_name] = tools + else: + logger.info(f'No tools configured for agent {agent_ref}') + + # Build agent + agent = AgentBuilder.from_yaml( + yaml_str=agent_yaml_content, + tool_registry=tool_registry, + access_token=access_token, + app_key=app_key, + ).build() + + agents_dict[agent_ref] = agent + logger.info(f'Successfully built agent: {agent_ref}') + + except Exception as e: + logger.error(f'Error building referenced agent {agent_ref}: {str(e)}') + raise ValueError( + f'Failed to build referenced agent {agent_ref}: {str(e)}' + ) + + return agents_dict + + async def create_workflow_from_yaml( + self, + yaml_content: str, + workflow_name: str, + access_token: Optional[str] = None, + app_key: Optional[str] = None, + ): + """ + Create workflow instance from YAML configuration + + Args: + yaml_content: YAML configuration content + workflow_name: The name of the workflow for logging purposes + + Returns: + Workflow instance created from YAML + """ + logger.info(f'Creating workflow from YAML for workflow: {workflow_name}') + + # Extract and build referenced agents + agent_references = self._extract_agent_references(yaml_content) + agents_dict = {} + + if agent_references: + logger.info( + f'Building {len(agent_references)} referenced agents for workflow {workflow_name}' + ) + agents_dict = await self._build_referenced_agents( + agent_references, access_token, app_key + ) + + # Build workflow with pre-built agents + workflow_builder = AriumBuilder.from_yaml( + agents=agents_dict, + yaml_str=yaml_content, + function_registry=FUNCTION_NODE_REGISTRY, + access_token=access_token, + app_key=app_key, + ) + workflow = workflow_builder.build() + + logger.info(f'Successfully created workflow for workflow: {workflow_name}') + return workflow + + async def run_workflow_inference( + self, + workflow: Arium, + inputs: List[BaseMessage] | str, + variables: Dict[str, Any], + workflow_name: str, + output_json_enabled: bool = True, + event_callback: Optional[Callable[[AriumEvent], None]] = None, + events_filter: Optional[List[AriumEventType]] = None, + ) -> tuple[str, float]: + """ + Run workflow inference with provided variables + + Args: + workflow: Workflow instance + inputs: Inputs to use for inference + variables: Variables to pass to the workflow + workflow_name: The name of the workflow for logging purposes + output_json_enabled: Whether to extract JSON from the response + event_callback: Optional callback function for workflow events + events_filter: Optional list of event types to filter + + Returns: + tuple: (result, execution_time) + """ + logger.info( + f'Running inference for workflow {workflow_name} with variables: {list(variables.keys())}' + ) + start_time = time.time() + + # Convert string input to list if necessary + if isinstance(inputs, str): + processed_inputs = [inputs] + else: + processed_inputs = inputs + + # Run workflow inference with optional event streaming + result_list: List[MessageMemoryItem] = await workflow.run( + processed_inputs, + variables=variables, + event_callback=event_callback, + events_filter=events_filter, + ) + + result_str = str(result_list[-1].result.content) + + # Conditionally extract JSON based on output_json_enabled flag + if output_json_enabled: + result = FloUtils.extract_jsons_from_string(result_str) + else: + result = result_str + + execution_time = time.time() - start_time + logger.info( + f'Successfully completed inference for workflow {workflow_name} in {execution_time:.2f} seconds' + ) + + return result, execution_time + + async def 
perform_inference(
+        self,
+        workflow_name: str,
+        namespace: str,
+        variables: Dict[str, Any],
+        inputs: List[BaseMessage] | str,
+        output_json_enabled: bool = True,
+        event_callback: Optional[Callable[[AriumEvent], None]] = None,
+        events_filter: Optional[List[AriumEventType]] = None,
+        access_token: Optional[str] = None,
+        app_key: Optional[str] = None,
+    ) -> tuple[str, float]:
+        """
+        Complete inference workflow: fetch YAML, create workflow, run inference
+
+        Args:
+            workflow_name: The name of the workflow
+            namespace: The namespace of the workflow
+            variables: Variables to pass to the workflow
+            inputs: Inputs to use for inference
+            output_json_enabled: Whether to extract JSON from the response
+            event_callback: Optional callback function for workflow events
+            events_filter: Optional list of event types to filter
+            access_token: Optional access token forwarded to the workflow builder
+            app_key: Optional app key forwarded to the workflow builder
+
+        Returns:
+            tuple: (result, execution_time)
+        """
+
+        # Fetch workflow YAML
+        yaml_content = await self.fetch_workflow_yaml(workflow_name, namespace)
+
+        # Create workflow from YAML
+        workflow = await self.create_workflow_from_yaml(
+            yaml_content, workflow_name, access_token, app_key
+        )
+
+        # Run inference with optional event streaming
+        result, execution_time = await self.run_workflow_inference(
+            workflow,
+            inputs,
+            variables,
+            workflow_name,
+            output_json_enabled,
+            event_callback,
+            events_filter,
+        )
+
+        return result, execution_time
+
+    async def perform_inference_v2(
+        self,
+        workflow_data: dict,
+        variables: Dict[str, Any],
+        inputs: List[BaseMessage] | str,
+        output_json_enabled: bool = True,
+        event_callback: Optional[Callable[[AriumEvent], None]] = None,
+        events_filter: Optional[List[AriumEventType]] = None,
+        access_token: Optional[str] = None,
+        app_key: Optional[str] = None,
+    ) -> tuple[str, float]:
+        """
+        Complete inference workflow (v2): use pre-fetched workflow data, run inference
+
+        Args:
+            workflow_data: Pre-fetched workflow data dict from workflow_crud_service.get_workflow()
+            variables: Variables to pass to the workflow
+            inputs: Inputs to use for inference
+            output_json_enabled: Whether to extract JSON from the response
+            event_callback: Optional callback function for workflow events
+            events_filter: Optional list of event types to filter
+            access_token: Optional access token forwarded to the workflow builder
+            app_key: Optional app key forwarded to the workflow builder
+
+        Returns:
+            tuple: (result, execution_time)
+        """
+        # Extract details from pre-fetched workflow data
+        namespace = workflow_data['namespace']
+        workflow_name = workflow_data['name']
+        workflow_id = workflow_data['id']
+
+        logger.info(
+            f'Starting v2 inference - namespace: {namespace}, name: {workflow_name}, workflow_id: {workflow_id}'
+        )
+
+        yaml_content = await self.fetch_workflow_yaml(workflow_name, namespace)
+
+        # Create workflow from YAML
+        workflow = await self.create_workflow_from_yaml(
+            yaml_content, workflow_name, access_token, app_key
+        )
+
+        # Run inference with optional event streaming
+        result, execution_time = await self.run_workflow_inference(
+            workflow,
+            inputs,
+            variables,
+            workflow_name,
+            output_json_enabled,
+            event_callback,
+            events_filter,
+        )
+
+        return result, execution_time
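For orientation, a minimal usage sketch of the service above; the workflow name, namespace, and variables are illustrative assumptions, not values from this patch:

# Sketch: running a workflow end to end, assuming a fully wired
# WorkflowInferenceService instance (storage, cache, CRUD, tools).
async def run_example(service: WorkflowInferenceService) -> None:
    result, execution_time = await service.perform_inference(
        workflow_name='ticket-triage',   # hypothetical workflow name
        namespace='support',             # hypothetical namespace
        variables={'priority': 'high'},
        inputs='Summarise the latest customer ticket',
    )
    print(result, execution_time)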
diff --git a/wavefront/server/modules/agents_module/agents_module/utils/agent_utils.py b/wavefront/server/modules/agents_module/agents_module/utils/agent_utils.py
new file mode 100644
index 00000000..191d81ac
--- /dev/null
+++ b/wavefront/server/modules/agents_module/agents_module/utils/agent_utils.py
@@ -0,0 +1,32 @@
+"""
+Utility functions for agent operations
+"""
+
+from typing import Optional
+
+
+def get_agent_yaml_key(namespace: str, agent_name: str) -> str:
+    """
+    Generate the YAML storage key for an agent
+
+    Args:
+        namespace: The namespace of the agent
+        agent_name: The unique identifier for the agent
+
+    Returns:
+        str: The storage key for the agent YAML file
+    """
+    return f'agents/{namespace}/{agent_name}.yaml'
+
+
+def get_agent_prefix(namespace: Optional[str] = None) -> str:
+    """
+    Generate the storage prefix for listing agents
+
+    Args:
+        namespace: Optional namespace to filter agents. If None, returns prefix for all agents
+
+    Returns:
+        str: The storage prefix for listing agents
+    """
+    if namespace:
+        return f'agents/{namespace}/'
+    return 'agents/'
diff --git a/wavefront/server/modules/agents_module/agents_module/utils/auth_utils.py b/wavefront/server/modules/agents_module/agents_module/utils/auth_utils.py
new file mode 100644
index 00000000..142cda3d
--- /dev/null
+++ b/wavefront/server/modules/agents_module/agents_module/utils/auth_utils.py
@@ -0,0 +1,26 @@
+from typing import Optional, Tuple
+from fastapi import Request
+
+from user_management_module.constants.auth import RootfloHeaders
+
+
+def extract_auth_credentials(request: Request) -> Tuple[Optional[str], Optional[str]]:
+    """
+    Extract access_token and app_key from request headers.
+
+    Args:
+        request: FastAPI Request object
+
+    Returns:
+        Tuple of (access_token, app_key), both can be None if not present
+    """
+    auth_header = request.headers.get('Authorization')
+    access_token = None
+    if auth_header:
+        parts = auth_header.split(' ', 1)
+        if len(parts) == 2 and parts[0].lower() == 'bearer':
+            access_token = parts[1]
+
+    app_key = request.headers.get(RootfloHeaders.CLIENT_KEY)
+
+    return access_token, app_key
diff --git a/wavefront/server/modules/agents_module/agents_module/utils/cache_utils.py b/wavefront/server/modules/agents_module/agents_module/utils/cache_utils.py
new file mode 100644
index 00000000..0087ff0c
--- /dev/null
+++ b/wavefront/server/modules/agents_module/agents_module/utils/cache_utils.py
@@ -0,0 +1,46 @@
+from typing import Optional
+from uuid import UUID
+
+
+def get_agent_by_id_cache_key(agent_id: UUID) -> str:
+    """Get cache key for agent metadata by ID"""
+    return f'agent:id:{agent_id}'
+
+
+def get_agent_yaml_cache_key(namespace: str, agent_name: str) -> str:
+    """Get cache key for agent YAML content"""
+    return f'agent_yaml:{namespace}:{agent_name}'
+
+
+def get_agents_list_cache_key(namespace: Optional[str] = None) -> str:
+    """Get cache key for agents list"""
+    if namespace:
+        return f'agents_list:namespace:{namespace}'
+    return 'agents_list:all'
+
+
+def get_namespace_cache_key(namespace_name: str) -> str:
+    """Get cache key for namespace by name"""
+    return f'namespace:name:{namespace_name}'
+
+
+def get_namespaces_list_cache_key() -> str:
+    """Get cache key for namespaces list"""
+    return 'namespaces:list'
+
+
+def get_workflow_by_id_cache_key(workflow_id: UUID) -> str:
+    """Get cache key for workflow metadata by ID"""
+    return f'workflow:id:{workflow_id}'
+
+
+def get_workflow_yaml_cache_key(namespace: str, workflow_name: str) -> str:
+    """Get cache key for workflow YAML content"""
+    return f'workflow_yaml:{namespace}:{workflow_name}'
+
+
+def get_workflows_list_cache_key(namespace: Optional[str] = None) -> str:
+    """Get cache key for workflows list"""
+    if namespace:
+        return f'workflows_list:namespace:{namespace}'
+    return 'workflows_list:all'
diff --git a/wavefront/server/modules/agents_module/agents_module/utils/input_processing_utils.py b/wavefront/server/modules/agents_module/agents_module/utils/input_processing_utils.py
new file mode 100644
index 00000000..0862965d
--- /dev/null
+++ b/wavefront/server/modules/agents_module/agents_module/utils/input_processing_utils.py
@@ -0,0 +1,151 @@
+"""
+Utility functions for processing inference inputs
+"""
+
+import re
+from typing import Any, List, Union
+from fastapi import HTTPException, status
+from flo_ai import (
+    AssistantMessage,
+    TextMessageContent,
+    ImageMessageContent,
+    DocumentMessageContent,
+    UserMessage,
+)
+from common_module.log.logger import logger
+
+
+def process_inference_inputs(
+    inputs: Union[List[dict | str], str],
+) -> Union[UserMessage, List[Union[UserMessage, AssistantMessage]]]:
+    """
+    Process inputs for inference, handling both string and list inputs with ImageMessage processing
+
+    Args:
+        inputs: Input data - can be a string or list containing strings and ImageMessage objects
+
+    Returns:
+        Union[UserMessage, List[Union[UserMessage, AssistantMessage]]]: Processed inputs ready for inference
+
+    Raises:
+        HTTPException: 400 Bad Request if base64 image data is invalid
+    """
+    # Process inputs based on type
+    if isinstance(inputs, str):
+        return UserMessage(content=inputs)
+    else:
+        resolved_inputs = []
+        for input_item in inputs:
+            if input_item.get('role') == 'assistant':
+                resolved_inputs.append(
+                    AssistantMessage(content=input_item.get('content'))
+                )
+            elif input_item.get('role') == 'user':
+                input_content = input_item.get('content', {})
+                if is_image_message(input_content):
+                    # Extract the base64 payload and mime_type from a data-URL image_base64
+                    try:
+                        data_url_pattern = r'^data:(image/[a-zA-Z0-9.+-]+);base64,(.+)$'
+                        match = re.match(
+                            data_url_pattern, input_content.get('image_base64')
+                        )
+                        if match:
+                            mime_type = match.group(1)
+                            processed_image = UserMessage(
+                                content=ImageMessageContent(
+                                    base64=match.group(2), mime_type=mime_type
+                                )
+                            )
+                            resolved_inputs.append(processed_image)
+                        else:
+                            resolved_inputs.append(
+                                UserMessage(
+                                    content=ImageMessageContent(
+                                        base64=input_content.get('image_base64'),
+                                        mime_type=input_content.get('mime_type'),
+                                    ),
+                                )
+                            )
+                    except Exception as e:
+                        logger.error(
+                            f'Error processing ImageMessage base64: {e}, message: {input_item}'
+                        )
+                        raise HTTPException(
+                            status_code=status.HTTP_400_BAD_REQUEST,
+                            detail=f'Invalid base64 image data: {e}',
+                        )
+                elif is_doc_message(input_content):
+                    # DocumentMessage - append directly
+                    resolved_inputs.append(
+                        UserMessage(
+                            content=DocumentMessageContent(
+                                base64=input_content.get('document_base64'),
+                                mime_type=input_content.get('mime_type'),
+                            )
+                        )
+                    )
+                elif is_text_message(input_content):
+                    resolved_inputs.append(
+                        UserMessage(
+                            content=TextMessageContent(text=input_item.get('content'))
+                        )
+                    )
+                else:
+                    raise HTTPException(
+                        status_code=status.HTTP_400_BAD_REQUEST,
+                        detail=f'Invalid input: {input_item}',
+                    )
+            else:
+                raise HTTPException(
+                    status_code=status.HTTP_400_BAD_REQUEST,
+                    detail=f'Invalid input: {input_item}',
+                )
+
+        return resolved_inputs
+
+
+def is_image_message(input_item: dict) -> bool:
+    """
+    Check if the input item represents an image message
+
+    Args:
+        input_item: Input item to check
+    Returns:
+        bool: True if input_item contains image content keys, False otherwise
+
+    """
+    return (
+        'image_url' in input_item
+        or 'image_base64' in input_item
+        or 'image_bytes' in input_item
+        or 'image_file_path' in input_item
+    )
+
+
+def is_doc_message(input_item: dict) -> bool:
+    """
+    Check if the input item represents a document message
+
+    Args:
+        input_item: Input item to check
+    Returns:
+        bool: True if input_item contains document content keys, False otherwise
+    """
+    return (
+        'document_url' in input_item
+        or 'document_base64' in input_item
+        or 'document_bytes' in input_item
+        or 'document_file_path' in input_item
+    )
+
+
+def is_text_message(input_item: Any) -> bool:
+    """
+    Check if the input item is a plain text message
+
+    Args:
+        input_item: Input item to check
+    Returns:
+        bool: True if input_item is a string, False otherwise
+    """
+    return isinstance(input_item, str)
diff --git a/wavefront/server/modules/agents_module/agents_module/utils/validation_utils.py b/wavefront/server/modules/agents_module/agents_module/utils/validation_utils.py
new file mode 100644
index 00000000..e2565e43
--- /dev/null
+++ b/wavefront/server/modules/agents_module/agents_module/utils/validation_utils.py
@@ -0,0 +1,28 @@
+import re
+
+
+def validate_agent_workflow_name(name: str, type: str = 'agent') -> None:
+    """
+    Validate agent or workflow name to ensure it:
+    - Starts with a letter (a-z, A-Z)
+    - Contains only letters, numbers, hyphens, and underscores
+    - No spaces or special characters
+
+    Args:
+        name: The name to validate
+        type: Type of entity ('agent' or 'workflow') for error messages
+
+    Raises:
+        ValueError: If the name contains invalid characters or format
+    """
+    if not name:
+        raise ValueError(f'{type.capitalize()} name cannot be empty')
+
+    # Must start with a letter, followed by letters, numbers, hyphens, or underscores
+    pattern = r'^[a-zA-Z][a-zA-Z0-9_-]*$'
+
+    if not re.match(pattern, name):
+        raise ValueError(
+            f'{type.capitalize()} name must start with a letter and can only contain letters, numbers, '
+            'hyphens, and underscores. Spaces and special characters are not allowed.'
+        )
diff --git a/wavefront/server/modules/agents_module/agents_module/utils/workflow_utils.py b/wavefront/server/modules/agents_module/agents_module/utils/workflow_utils.py
new file mode 100644
index 00000000..2b243280
--- /dev/null
+++ b/wavefront/server/modules/agents_module/agents_module/utils/workflow_utils.py
@@ -0,0 +1,48 @@
+"""
+Utility functions for workflow operations
+"""
+
+from typing import Optional
+
+
+def get_workflow_yaml_key(namespace: str, workflow_name: str) -> str:
+    """
+    Generate the YAML storage key for a workflow
+
+    Args:
+        namespace: The namespace of the workflow
+        workflow_name: The unique identifier for the workflow
+
+    Returns:
+        str: The storage key for the workflow YAML file
+    """
+    return f'workflows/{namespace}/{workflow_name}.yaml'
+
+
+def get_workflow_id_and_namespace_from_yaml_key(yaml_key: str) -> tuple[Optional[str], Optional[str]]:
+    """
+    Extract the namespace and workflow name from a workflow YAML storage key
+
+    Args:
+        yaml_key: The YAML key, e.g. workflows/{namespace}/{workflow_name}.yaml
+
+    Returns:
+        tuple[Optional[str], Optional[str]]: (namespace, workflow_name), or (None, None) if the key is malformed
+    """
+    parts = yaml_key.split('/')
+    if len(parts) >= 3:
+        return parts[1], parts[2].replace('.yaml', '')
+    return None, None
+
+
+def get_workflow_prefix(namespace: Optional[str] = None) -> str:
+    """
+    Generate the storage prefix for listing workflows
+
+    Args:
+        namespace: Optional namespace to filter workflows.
If None, returns prefix for all workflows + + Returns: + str: The storage prefix for listing workflows + """ + if namespace: + return f'workflows/{namespace}/' + return 'workflows/' diff --git a/wavefront/server/modules/agents_module/pyproject.toml b/wavefront/server/modules/agents_module/pyproject.toml new file mode 100644 index 00000000..3f0a9bc6 --- /dev/null +++ b/wavefront/server/modules/agents_module/pyproject.toml @@ -0,0 +1,37 @@ +[project] +name = "agents-module" +version = "0.1.0" +description = "Agents module for inference" +authors = [ + { name = "rootflo engineering", email = "engineering@rootflo.ai" } +] +requires-python = ">=3.11" + +dependencies = [ + "common-module", + "flo-cloud", + "flo-utils", + "tools-module", + "api-services-module", + "flo-ai>=1.1.0-rc5", +] + +[tool.uv.sources] +common-module = { workspace = true } +flo-utils = { workspace = true } +flo-cloud = { workspace = true } +tools-module = {workspace = true} +api-services-module = {workspace = true} + +[tool.pytest.ini_options] +asyncio_mode = "auto" + +[tool.uv] +package = true + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["agents_module"] diff --git a/wavefront/server/modules/agents_module/tests/test_input_processing_utils.py b/wavefront/server/modules/agents_module/tests/test_input_processing_utils.py new file mode 100644 index 00000000..fb858726 --- /dev/null +++ b/wavefront/server/modules/agents_module/tests/test_input_processing_utils.py @@ -0,0 +1,440 @@ +""" +Tests for input_processing_utils module +""" + +import base64 +import pytest +from unittest.mock import patch +from fastapi import HTTPException +from flo_ai import ( + ImageMessageContent, + DocumentMessageContent, + UserMessage, + TextMessageContent, + AssistantMessage, +) + +from agents_module.utils.input_processing_utils import ( + process_inference_inputs, + is_image_message, + is_doc_message, +) + + +class TestProcessInferenceInputs: + """Test cases for process_inference_inputs function""" + + def test_string_input_returns_user_message(self): + """Test that string input is converted to UserMessage""" + input_str = 'This is a test string' + result = process_inference_inputs(input_str) + assert isinstance(result, UserMessage) + assert result.role == 'user' + assert isinstance(result.content, str) + assert result.content == input_str + + def test_empty_string_input(self): + """Test that empty string input is converted to UserMessage""" + input_str = '' + result = process_inference_inputs(input_str) + assert isinstance(result, UserMessage) + assert result.role == 'user' + assert isinstance(result.content, str) + assert result.content == input_str + + def test_empty_list_input(self): + """Test that empty list input returns empty list""" + result = process_inference_inputs([]) + assert result == [] + + def test_list_with_string_only(self): + """Test list containing only string items - should raise error as strings need role""" + inputs = ['Hello', 'World', 'Test'] + # The function expects dicts with 'role' field, so strings in list will raise AttributeError + with pytest.raises(AttributeError): + process_inference_inputs(inputs) + + def test_image_message_with_data_url(self): + """Test processing ImageMessage with data URL format""" + # Create a simple 1x1 pixel PNG in base64 + simple_png_b64 = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg==' + + image_input = { + 'role': 'user', + 'content': { + 'image_base64': 
f'data:image/png;base64,{simple_png_b64}', + }, + } + + inputs = [image_input] + result = process_inference_inputs(inputs) + + assert len(result) == 1 + assert isinstance(result[0], UserMessage) + assert isinstance(result[0].content, ImageMessageContent) + assert result[0].content.mime_type == 'image/png' + # base64 field should contain only the base64 part (without data URL prefix) + assert result[0].content.base64 == simple_png_b64 + assert isinstance(result[0].content.base64, str) + + def test_image_message_with_plain_base64(self): + """Test processing ImageMessage with plain base64 (no data URL prefix)""" + simple_png_b64 = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg==' + + image_input = { + 'role': 'user', + 'content': { + 'image_base64': simple_png_b64, + 'mime_type': 'image/png', + }, + } + + inputs = [image_input] + result = process_inference_inputs(inputs) + + assert len(result) == 1 + assert isinstance(result[0], UserMessage) + assert isinstance(result[0].content, ImageMessageContent) + assert result[0].content.mime_type == 'image/png' + # base64 field should contain the provided base64 string + assert result[0].content.base64 == simple_png_b64 + assert isinstance(result[0].content.base64, str) + + def test_image_message_invalid_base64(self): + """Test that plain base64 (non-data URL) is processed correctly""" + image_input = { + 'role': 'user', + 'content': {'image_base64': 'invalid_base64_data'}, + } + + inputs = [image_input] + + # The pattern won't match, so it falls back to else branch + # and uses the provided image_base64 and mime_type (None) directly + result = process_inference_inputs(inputs) + assert len(result) == 1 + assert isinstance(result[0], UserMessage) + assert isinstance(result[0].content, ImageMessageContent) + assert result[0].content.base64 == 'invalid_base64_data' + assert result[0].content.mime_type is None + + def test_image_message_with_none_base64(self): + """Test that None image_base64 raises HTTPException""" + image_input = { + 'role': 'user', + 'content': {'image_base64': None}, + } + + inputs = [image_input] + + # re.match will raise TypeError when given None, which will be caught + # and re-raised as HTTPException + with pytest.raises(HTTPException) as exc_info: + process_inference_inputs(inputs) + + assert exc_info.value.status_code == 400 + assert 'Invalid base64 image data' in str(exc_info.value.detail) + + def test_document_message_pdf(self): + """Test processing DocumentMessage with PDF type""" + # Encode bytes to base64 string as expected by implementation + document_base64_str = base64.b64encode(b'fake_pdf_content').decode('utf-8') + doc_input = { + 'role': 'user', + 'content': { + 'document_base64': document_base64_str, + 'mime_type': 'application/pdf', + }, + } + + inputs = [doc_input] + result = process_inference_inputs(inputs) + + assert len(result) == 1 + assert isinstance(result[0], UserMessage) + assert isinstance(result[0].content, DocumentMessageContent) + assert result[0].content.mime_type == 'application/pdf' + # base64 field should contain base64-encoded string + assert result[0].content.base64 == document_base64_str + + def test_document_message_txt(self): + """Test processing DocumentMessage with TXT type""" + # Encode bytes to base64 string as expected by implementation + document_base64_str = base64.b64encode(b'fake_txt_content').decode('utf-8') + doc_input = { + 'role': 'user', + 'content': { + 'document_base64': document_base64_str, + 'mime_type': 'text/plain', + }, + } + 
+ inputs = [doc_input] + result = process_inference_inputs(inputs) + + assert len(result) == 1 + assert isinstance(result[0], UserMessage) + assert isinstance(result[0].content, DocumentMessageContent) + assert result[0].content.mime_type == 'text/plain' + # base64 field should contain base64-encoded string + assert result[0].content.base64 == document_base64_str + + def test_document_message_default_type(self): + """Test DocumentMessage processing""" + document_base64_str = base64.b64encode(b'content').decode('utf-8') + doc_input = { + 'role': 'user', + 'content': {'document_base64': document_base64_str}, + } + + inputs = [doc_input] + result = process_inference_inputs(inputs) + + assert len(result) == 1 + assert isinstance(result[0], UserMessage) + assert isinstance(result[0].content, DocumentMessageContent) + + def test_mixed_inputs(self): + """Test processing mixed list with text, images, and documents""" + simple_png_b64 = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg==' + document_base64_str = base64.b64encode(b'pdf_content').decode('utf-8') + + inputs = [ + {'role': 'user', 'content': 'Text input'}, + { + 'role': 'user', + 'content': {'image_base64': f'data:image/png;base64,{simple_png_b64}'}, + }, + {'role': 'user', 'content': 'Another text input'}, + { + 'role': 'user', + 'content': {'document_base64': document_base64_str}, + }, + ] + + result = process_inference_inputs(inputs) + + assert len(result) == 4 + assert isinstance(result[0], UserMessage) + assert isinstance(result[0].content, TextMessageContent) + assert result[0].content.text == 'Text input' + assert isinstance(result[1], UserMessage) + assert isinstance(result[1].content, ImageMessageContent) + assert result[1].content.mime_type == 'image/png' + assert result[1].content.base64 == simple_png_b64 + assert isinstance(result[2], UserMessage) + assert isinstance(result[2].content, TextMessageContent) + assert result[2].content.text == 'Another text input' + assert isinstance(result[3], UserMessage) + assert isinstance(result[3].content, DocumentMessageContent) + + def test_assistant_message(self): + """Test processing AssistantMessage""" + assistant_input = { + 'role': 'assistant', + 'content': 'This is an assistant message', + } + + inputs = [assistant_input] + result = process_inference_inputs(inputs) + + assert len(result) == 1 + assert isinstance(result[0], AssistantMessage) + assert result[0].content == 'This is an assistant message' + assert result[0].role == 'assistant' + + @patch('agents_module.utils.input_processing_utils.logger') + def test_image_processing_error_logging(self, mock_logger): + """Test that image processing errors are logged""" + image_input = { + 'role': 'user', + 'content': {'image_base64': None}, + } + + inputs = [image_input] + + with pytest.raises(HTTPException): + process_inference_inputs(inputs) + + mock_logger.error.assert_called_once() + assert 'Error processing ImageMessage base64' in str( + mock_logger.error.call_args + ) + + +class TestIsImageMessage: + """Test cases for is_image_message function""" + + def test_image_url_detected(self): + """Test that image_url key is detected""" + input_item = {'image_url': 'https://example.com/image.png'} + assert is_image_message(input_item) is True + + def test_image_base64_detected(self): + """Test that image_base64 key is detected""" + input_item = {'image_base64': 'base64_data'} + assert is_image_message(input_item) is True + + def test_image_bytes_detected(self): + """Test that image_bytes key 
is detected""" + input_item = {'image_bytes': b'image_data'} + assert is_image_message(input_item) is True + + def test_image_file_path_detected(self): + """Test that image_file_path key is detected""" + input_item = {'image_file_path': '/path/to/image.png'} + assert is_image_message(input_item) is True + + def test_multiple_image_keys(self): + """Test that multiple image keys are detected""" + input_item = { + 'image_url': 'https://example.com/image.png', + 'image_base64': 'base64_data', + } + assert is_image_message(input_item) is True + + def test_non_image_message(self): + """Test that non-image messages are not detected""" + input_item = {'text': 'This is just text'} + assert is_image_message(input_item) is False + + def test_empty_dict(self): + """Test that empty dict is not detected as image message""" + input_item = {} + assert is_image_message(input_item) is False + + +class TestIsDocMessage: + """Test cases for is_doc_message function""" + + def test_document_url_detected(self): + """Test that document_url key is detected""" + input_item = {'document_url': 'https://example.com/doc.pdf'} + assert is_doc_message(input_item) is True + + def test_document_base64_detected(self): + """Test that document_base64 key is detected""" + input_item = {'document_base64': 'base64_data'} + assert is_doc_message(input_item) is True + + def test_document_bytes_detected(self): + """Test that document_bytes key is detected""" + input_item = {'document_bytes': b'document_data'} + assert is_doc_message(input_item) is True + + def test_document_file_path_detected(self): + """Test that document_file_path key is detected""" + input_item = {'document_file_path': '/path/to/doc.pdf'} + assert is_doc_message(input_item) is True + + def test_multiple_document_keys(self): + """Test that multiple document keys are detected""" + input_item = { + 'document_url': 'https://example.com/doc.pdf', + 'document_base64': 'base64_data', + } + assert is_doc_message(input_item) is True + + def test_non_document_message(self): + """Test that non-document messages are not detected""" + input_item = {'text': 'This is just text'} + assert is_doc_message(input_item) is False + + def test_empty_dict(self): + """Test that empty dict is not detected as document message""" + input_item = {} + assert is_doc_message(input_item) is False + + +class TestEdgeCases: + """Test edge cases and error conditions""" + + def test_none_input(self): + """Test that None input is handled gracefully""" + with pytest.raises(TypeError): + process_inference_inputs(None) + + def test_integer_input(self): + """Test that integer input raises appropriate error""" + with pytest.raises(TypeError): + process_inference_inputs(123) + + def test_dict_input_not_list_or_string(self): + """Test that dict input (not list or string) raises error""" + # The function expects either str or List, so a dict should raise TypeError + with pytest.raises((TypeError, AttributeError)): + process_inference_inputs({'key': 'value'}) + + def test_nested_list_input(self): + """Test that nested lists are handled""" + inputs = [ + {'role': 'user', 'content': 'nested'}, + {'role': 'user', 'content': 'string'}, + ] + result = process_inference_inputs(inputs) + assert len(result) == 2 + assert isinstance(result[0], UserMessage) + assert isinstance(result[1], UserMessage) + + def test_image_message_with_malformed_data_url(self): + """Test image message with malformed data URL - should fall back to else branch""" + image_input = { + 'role': 'user', + 'content': {'image_base64': 
'data:invalid_format'},
+        }
+
+        inputs = [image_input]
+
+        # The pattern won't match, so it falls back to else branch
+        # and uses the provided image_base64 and mime_type (None) directly
+        result = process_inference_inputs(inputs)
+        assert len(result) == 1
+        assert isinstance(result[0], UserMessage)
+        assert isinstance(result[0].content, ImageMessageContent)
+        assert result[0].content.base64 == 'data:invalid_format'
+        assert result[0].content.mime_type is None
+
+    def test_document_message_with_none_values(self):
+        """Test document message with None values"""
+        doc_input = {
+            'role': 'user',
+            'content': {
+                'document_base64': None,
+                'mime_type': None,
+            },
+        }
+
+        inputs = [doc_input]
+        result = process_inference_inputs(inputs)
+
+        assert len(result) == 1
+        assert isinstance(result[0], UserMessage)
+        assert isinstance(result[0].content, DocumentMessageContent)
+
+    def test_image_message_image_and_string(self):
+        """Test processing ImageMessage alongside text messages"""
+        simple_png_b64 = 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg=='
+
+        image_input = {
+            'role': 'user',
+            'content': {
+                'image_base64': f'data:image/svg+xml;base64,{simple_png_b64}',
+            },
+        }
+
+        inputs = [
+            {'role': 'user', 'content': 'Validation Cruel'},
+            image_input,
+            {'role': 'user', 'content': 'Validation Cruel'},
+        ]
+        result = process_inference_inputs(inputs)
+
+        assert len(result) == 3
+        assert isinstance(result[0], UserMessage)
+        assert isinstance(result[0].content, TextMessageContent)
+        assert isinstance(result[1], UserMessage)
+        assert isinstance(result[1].content, ImageMessageContent)
+        assert result[1].content.mime_type == 'image/svg+xml'
+        assert result[1].content.base64 == simple_png_b64
+        assert isinstance(result[2], UserMessage)
+        assert isinstance(result[2].content, TextMessageContent)
diff --git a/wavefront/server/modules/api_services_module/README.md b/wavefront/server/modules/api_services_module/README.md
new file mode 100644
index 00000000..1cbf1cf2
--- /dev/null
+++ b/wavefront/server/modules/api_services_module/README.md
@@ -0,0 +1,361 @@
+Goal
+Build a configurable, enterprise-grade API proxy middleware that acts as an intelligent gateway between clients and multiple backend services. The middleware handles routing, authentication injection, and request/response transformation, all driven by declarative YAML configuration, eliminating the need for code changes when adding new backends or services.
+Problem Statement
+Organizations often need to:
+Expose internal/external APIs through a unified interface
+Manage authentication for multiple heterogeneous backend services (OAuth2, API keys, Basic Auth, etc.)
+Avoid duplicating auth logic across client applications
+Centralize request routing and transformation logic
+Support multiple authentication schemes without code changes
+Current solutions require either custom code for each backend or complex infrastructure. We need a simple, configuration-driven middleware.
+Core Features
+1. Proxy Layer
+Forward HTTP requests (GET, POST, PUT, DELETE, PATCH) to configured backends
+Preserve client headers and query parameters
+Support request body forwarding
+Return backend response as-is or transformed
+2. Routing & Service Registry
+Route incoming /api/{service_name} requests to the correct backend
+Support dynamic service-to-backend mapping via YAML
+Handle multiple backends with different base URLs
+Support service aliasing and versioning (future)
+3. Authentication Layer
+Bearer Token: Static token injection
+Basic Auth: Username/password encoding
+API Key: Custom header injection
+OAuth2: Client credentials flow with auto-refresh and caching
+HMAC: Signature generation for request validation
+AWS SigV4: AWS request signing (future)
+Automatic header merging with client headers
+4. Token Management
+In-memory token cache with expiration tracking
+Automatic OAuth2 token refresh before expiry
+Graceful fallback on auth failures
+Architecture
+High-Level Components
+
+Client Request
+    ↓
+FastAPI Router
+    ↓
+Service Registry (YAML configs)
+    ↓
+Auth Manager (AuthManager class)
+    ├─ Bearer Handler
+    ├─ Basic Auth Handler
+    ├─ API Key Handler
+    ├─ OAuth2 Handler
+    ├─ HMAC Handler
+    └─ [Pluggable handlers]
+    ↓
+HTTP Client (httpx with retry/timeout)
+    ↓
+Backend Service
+    ↓
+Response (return to client)
+
+Key Components
+service-config.yaml – Backend and service definitions
+AuthManager – Central auth orchestration
+Auth Handlers – Pluggable auth type implementations
+Proxy Router – FastAPI route handler
+
+Implementation
+Phase 1 (MVP - 2 weeks, ~10 days effort)
+Service Definition Standard (Auth Layer & Services/APIs)
+
+Define a YAML-based service definition (named as -service-definition.yaml). One middleware app can have multiple services configured, meaning multiple service definitions available. Each service definition is grouped into logical groups called services.
+e.g. crm-service-definition.yaml, gupshup-service-definition.yaml, etc.
+
+Services -> (Auth + CRM Services) + (Auth + Marketing services) + (Auth + CPaaS Services)
+
+Each service definition will have the following configurations:
+Authentication configurations: how to authenticate and authorize against the backend service
+Service/API configurations: the APIs that are exposed to the client from the backend
+
+Service Schema:
+service:
+  # ID has to be unique and should not contain spaces
+  id:
+  # (base url of the service)
+  base_url: string
+
+  auth:
+
+  apis:
+
+
+Authentication Configurations
+We only plan to support 3 types of authenticators (Bearer, Basic, API Key) in Phase 1. The authentication configuration will have the following things:
+
+The auth configuration based on the type of auth chosen by the user
+Additional headers to be added, if any
+A pre-execution script which can get executed just before sending the request (pre-processor)
+
+Auth Schema:
+auth:
+  # id of the authenticator (eg: facebook-auth-service)
+  id: string
+  # can be v1 or v2, defaulting to v1 if not provided
+  version: string
+  type: string (bearer | basic | api_key)
+  # (Optional if the auth service is hosted separately)
+  base_url: string | null
+  path: string (path to the auth API)
+
+  # Auth type-specific configuration
+  # (see examples below)
+
+  # Additional static headers to inject (optional)
+  additional_headers:
+    {header_name}: string
+    {header_name}: string
+    # Examples: X-Custom-Header, X-Request-ID, etc.
+
+  # Pre-execution script (preprocessor)
+  preprocessor:
+    enabled: boolean (default: false)
+    script_path: string (path to script)
+
+Examples:
+
+
+The auth execution hierarchy:
+(Request Start)
+  -> Authenticator (based on type)
+  -> Add additional headers (if any)
+  -> Auth Pre-Processor
+  -> (Request is Handed over to APIs Implementation)
+Note: Generally authenticators are part of the overall APIs, and they are called as part of the API call. But if the client wants to directly utilize the auth API, they should be able to call /services//authenticators//
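For illustration, a minimal sketch of how the three Phase 1 authenticators could turn an auth config into request headers. The type-specific field names (token, username, password, api_key, header_name) are assumptions standing in for the elided examples above, not a finalized schema:

import base64

def build_auth_headers(auth: dict) -> dict:
    """Sketch: map a Phase 1 auth config onto outgoing request headers."""
    if auth['type'] == 'bearer':
        # Bearer: static token injection
        return {'Authorization': f"Bearer {auth['token']}"}
    if auth['type'] == 'basic':
        # Basic: username/password encoding per RFC 7617
        creds = f"{auth['username']}:{auth['password']}".encode()
        return {'Authorization': f"Basic {base64.b64encode(creds).decode()}"}
    if auth['type'] == 'api_key':
        # API key: key injected under a configurable header name
        return {auth.get('header_name', 'X-API-Key'): auth['api_key']}
    raise ValueError(f"Unsupported auth type: {auth['type']}")

# additional_headers from the config are merged after the authenticator runs:
headers = build_auth_headers({'type': 'bearer', 'token': 'abc123'})
headers.update({'X-Request-ID': 'trace-1'})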
+Pre-Processor Schema
+
+For the authenticator, the pre-processor function will look something like this, in terms of definition:
+const main = (auth_config, request_context) => {
+    """
+    Preprocessor function that modifies auth config before request.
+
+    Args:
+        auth_config: Current auth configuration dict (type, token, or other authenticator-specific values)
+        request_context: Request metadata (method, path, etc.)
+
+    Returns:
+        dict: {
+            "auth_config": modified auth_config,
+            "additional_headers": dict of headers to add
+        }
+    """
+}
+
+Please find example auth preprocessors inside
+API/Service Configurations
+This part defines the APIs and how to call them. The API configurations will have the following parts:
+
+The URL path of the API to call (the base_url comes from the service-level base_url)
+The ID of the API
+Any additional headers
+Output mapper: transforms the payload of the backend API, if required
+Pre-processor script: a script to modify the request before sending it to the backend
+Post-processor script: a script to modify the payload or response before returning it to the client
+
+API Schema:
+apis:
+  - id: string (id of the API)
+    version: string (optional with default to v1)
+    path: string (e.g., /users, /transactions/{id})
+    method: string (GET | POST | PUT | DELETE | PATCH)
+
+    # Optional: Additional headers specific to this API
+    additional_headers:
+      {header_name}: string
+      {header_name}: string
+
+    # Optional: Preprocess/modify request before sending to backend
+    preprocessor:
+      enabled: boolean (default: false)
+      script_path: string (path to Python script)
+      # Modifies request body, path parameters, headers
+
+    # Optional: Transform backend response payload
+    output_mapper:
+      enabled: boolean (default: false)
+      mapper:
+        :
+        :
+        :
+      # Maps response fields from backend to client format
+
+    # Optional: Postprocess/modify response before sending to client
+    postprocessor:
+      enabled: boolean (default: false)
+      script_path: string (path to Python script)
+      # Modifies response body, headers, status code
+
+Examples:
+
+
+The full execution hierarchy:
+(Request Start)
+  -> Authenticator (based on type)
+  -> Add additional headers (if any)
+  -> Auth Pre-Processor
+  ............ auth layer ends & api layer starts .........
+  -> API preprocessor
+  -> Add additional API headers
+  -> Request Preprocessor
+  -> Request Sent to Backend
+  -> Response Output Mapper
+  -> Response Post-processor
+  -> Response sent to client
+Here is the full Service Definition Schema (a parser sketch follows it):
+service:
+  # ID has to be unique and should not contain spaces
+  id:
+  # (base url of the service)
+  base_url: string
+
+  auth:
+    # id of the authenticator (eg: facebook-auth-service)
+    id: string
+    # can be v1 or v2, defaulting to v1 if not provided
+    version: string
+    type: string (bearer | basic | api_key)
+    # (Optional if the auth service is hosted separately)
+    base_url: string | null
+    path: string (path to the auth API)
+
+    # Auth type-specific configuration
+    # (see examples below)
+
+    # Additional static headers to inject (optional)
+    additional_headers:
+      {header_name}: string
+      {header_name}: string
+      # Examples: X-Custom-Header, X-Request-ID, etc.
+
+    # Pre-execution script (preprocessor)
+    preprocessor:
+      enabled: boolean (default: false)
+      script_path: string (path to script)
+  apis:
+    - id: string (id of the API)
+      version: string (optional with default to v1)
+      path: string (e.g., /users, /transactions/{id})
+      method: string (GET | POST | PUT | DELETE | PATCH)
+
+      # Optional: Additional headers specific to this API
+      additional_headers:
+        {header_name}: string
+        {header_name}: string
+
+      # Optional: Preprocess/modify request before sending to backend
+      preprocessor:
+        enabled: boolean (default: false)
+        script_path: string (path to Python script)
+        # Modifies request body, path parameters, headers
+
+      # Optional: Transform backend response payload
+      output_mapper:
+        enabled: boolean (default: false)
+        mapper:
+          :
+          :
+          :
+        # Maps response fields from backend to client format
+
+      # Optional: Postprocess/modify response before sending to client
+      postprocessor:
+        enabled: boolean (default: false)
+        script_path: string (path to Python script)
+        # Modifies response body, headers, status code
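As a sketch of how such a definition could be loaded into typed objects (the Service Definition Parser described in the implementation section below), assuming PyYAML and illustrative dataclass fields:

from dataclasses import dataclass, field
from typing import List
import yaml

@dataclass
class ApiConfig:
    id: str
    path: str
    method: str
    version: str = 'v1'
    additional_headers: dict = field(default_factory=dict)

@dataclass
class ServiceDefinition:
    id: str
    base_url: str
    auth: dict
    apis: List[ApiConfig]

def parse_service_definition(yaml_text: str) -> ServiceDefinition:
    """Sketch: convert a *-service-definition.yaml into config objects."""
    service = yaml.safe_load(yaml_text)['service']
    known = ApiConfig.__dataclass_fields__
    apis = [
        ApiConfig(**{k: v for k, v in api.items() if k in known})
        for api in service.get('apis', [])
    ]
    return ServiceDefinition(
        id=service['id'],
        base_url=service['base_url'],
        auth=service.get('auth', {}),
        apis=apis,
    )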
+API from Client
+The API call from the client to the backend API will be:
+POST /floware/v1/services//apis//?
+All requests will be POST, as the other verbs are configured at the backend.
+Response Schema:
+{
+  "meta": {
+    "status": ,
+    "message": ,
+    "trace": (Refer Task Breakdown Task No: 9)
+  },
+  "data"
+}
+Service Layer Implementation
+
+The implementation of the Service Layer involves the following components:
+Service registry: where the service definitions are maintained, basically CRUD for all the YAML definitions
+Service Definition Parser: a parser class to parse and convert service definitions to POJO classes
+Core proxy pipeline: the main middleware router/proxy which does the execution. This has multiple subcomponents:
+Auth Manager: manages different kinds of auth
+Service Manager: manages different kinds of API calls
+Error handling & logging: handling errors and forwarding them to the client
+Unit Testing: unit test the code functionalities thoroughly
+
+Most of this is straightforward, except for the Core Proxy Pipeline, which is the core component. Let's elaborate on this.
+
+Core proxy pipeline
+
+Implementation-wise and design-pattern-wise, what we have at hand is a pipeline. The pipeline gets triggered when an API call is hit. We can call it a pipeline because multiple things happen in sequence (at least from the facade layer it looks like a sequence, even though we can optimise later). So every service definition has to become a pipeline, which is then cached at runtime to process the API requests.
+
+Auth pipeline
+[Authenticator → Header Injector → Pre-processor]
+
+API Pipeline
+[API processor → Header Injector → Preprocessor → Request to Backend → Mapper → Post-processor]
+
+Service Pipeline
+[Auth pipeline → API Pipeline]
+
+Use the composite pattern to build this up. All components of the pipeline should be of the same type, i.e. child classes of the pipeline protocol (a composite sketch follows the protocol below).
+
+Example Protocol (for reference only):
+class PipelineStage(ABC):
+    """
+    Abstract base class for all pipeline components.
+
+    All pipeline stages (atomic and composite) must implement this protocol.
+    This ensures uniform behavior across the entire pipeline architecture.
+    """
+
+    @abstractmethod
+    def execute(self, context: PipelineContext) -> PipelineContext:
+        """
+        Execute the pipeline stage.
+
+        Args:
+            context: Pipeline context that flows through stages
+
+        Returns:
+            Modified context (same object, modified in-place)
+
+        Raises:
+            PipelineException: If any stage fails
+        """
+        pass
+
+    @abstractmethod
+    def get_stage_type(self) -> StageType:
+        """Get the type of this pipeline stage (the current pipeline node name)"""
+        pass
+
+    @abstractmethod
+    def get_name(self) -> str:
+        """Get the name/identifier of this pipeline stage"""
+        pass
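A sketch of the composite side of the pattern. Minimal stand-ins for StageType and PipelineContext are restated so the sketch runs on its own; the module's real CompositePipelineStage (exported later in this patch from models/pipeline) presumably plays this role, and this is an illustration, not its actual implementation:

from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from enum import Enum
from typing import Dict, List

class StageType(Enum):
    # Illustrative members; the real enum lives in models/pipeline
    AUTHENTICATOR = 'authenticator'
    API = 'api'
    SERVICE = 'service'

@dataclass
class PipelineContext:
    # Minimal stand-in: the real context carries request/response state
    headers: Dict[str, str] = field(default_factory=dict)
    trace: List[str] = field(default_factory=list)

class PipelineStage(ABC):
    @abstractmethod
    def execute(self, context: PipelineContext) -> PipelineContext: ...
    @abstractmethod
    def get_stage_type(self) -> StageType: ...
    @abstractmethod
    def get_name(self) -> str: ...

class CompositePipelineStage(PipelineStage):
    """Sketch: a stage built from child stages run in sequence. The auth
    pipeline, the API pipeline, and the full service pipeline can all be
    assembled from this one composite."""

    def __init__(self, name: str, stage_type: StageType, stages: List[PipelineStage]):
        self._name = name
        self._stage_type = stage_type
        self._stages = stages

    def execute(self, context: PipelineContext) -> PipelineContext:
        # Each child mutates the shared context in order; a failing child
        # surfaces its exception to the caller.
        for stage in self._stages:
            context = stage.execute(context)
        return context

    def get_stage_type(self) -> StageType:
        return self._stage_type

    def get_name(self) -> str:
        return self._name

# service pipeline = auth pipeline followed by API pipeline, e.g.:
# service = CompositePipelineStage('crm-service', StageType.SERVICE,
#                                  [auth_pipeline, api_pipeline])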
+Phase 1: Task Breakdown
+
+Please reach out to for any questions
+Appendix
+
+Phase 2 (OAuth2 & Extensibility - 2 weeks, ~25 days)
+
+OAuth2 with token caching/refresh
+HMAC authentication
+Pluggable auth architecture
+Retry & timeout strategies
+Rate limits
+Comprehensive testing
+Observability & metrics
diff --git a/wavefront/server/modules/api_services_module/api_services_module/__init__.py b/wavefront/server/modules/api_services_module/api_services_module/__init__.py
new file mode 100644
index 00000000..088a8944
--- /dev/null
+++ b/wavefront/server/modules/api_services_module/api_services_module/__init__.py
@@ -0,0 +1,44 @@
+"""
+Floware API Services Module
+
+Enterprise-grade API proxy middleware that acts as an intelligent gateway
+between clients and multiple backend services. Handles routing, authentication
+injection, and request/response transformation through declarative YAML configuration.
+"""
+
+from .core.proxy import ApiProxy
+from .core.router import ProxyRouter
+from .config.registry import ServiceRegistry
+from .config.parser import ServiceDefinitionParser
+from .auth.manager import AuthManager
+from .models.service import ServiceDefinition, AuthConfig, ApiConfig, ProxyResponse
+from .models.pipeline import PipelineContext, PipelineStage, CompositePipelineStage
+from .api_services_container import ApiServicesContainer, create_api_services_container
+from .execution.execute import execute_api_service
+
+__version__ = '1.0.0'
+__author__ = 'Floware Team'
+
+__all__ = [
+    # Core components
+    'ApiProxy',
+    'ProxyRouter',
+    # Configuration
+    'ServiceRegistry',
+    'ServiceDefinitionParser',
+    # Authentication
+    'AuthManager',
+    # Models
+    'ServiceDefinition',
+    'AuthConfig',
+    'ApiConfig',
+    'ProxyResponse',
+    'PipelineContext',
+    'PipelineStage',
+    'CompositePipelineStage',
+    # Dependency Injection
+    'ApiServicesContainer',
+    'create_api_services_container',
+    # Utility Functions
+    'execute_api_service',
+]
diff --git a/wavefront/server/modules/api_services_module/api_services_module/api_services_container.py b/wavefront/server/modules/api_services_module/api_services_module/api_services_container.py
new file mode 100644
index 00000000..07a4aa53
--- /dev/null
+++ b/wavefront/server/modules/api_services_module/api_services_module/api_services_container.py
@@ -0,0 +1,144 @@
+"""Dependency injection container for API services module."""
+
+from dependency_injector import containers, providers
+
+from .config.registry import ServiceRegistry
+from .config.parser import ServiceDefinitionParser
+from .auth.manager import AuthManager
+from .core.proxy import ApiProxy
+from .core.router import ProxyRouter
+from .core.manager import ApiServicesManager
+from .pipeline.builder import PipelineBuilder, PipelineCache
+from .utils.api_change_processor import ApiChangeProcessor
+from .utils.api_change_publisher import ApiChangePublisher
+
+
+def _initialize_service_registry(service_registry: ServiceRegistry) -> ServiceRegistry:
+    """Initialize service registry (without loading from DB - that happens in startup event)."""
+    # Don't load from DB
here - it's async and container initialization is sync + # DB loading will happen in the startup event handler + return service_registry + + +class ApiServicesContainer(containers.DeclarativeContainer): + """Dependency injection container for API services module.""" + + # Configuration + config = providers.Configuration() + + # External dependencies (can be injected from parent containers) + db_client = providers.Dependency() + cache_manager = providers.Dependency() + response_formatter = providers.Dependency() + + # api services repository + api_services_repository = providers.Dependency() + + # cache + cloud_storage_manager = providers.Dependency() + + # Core service components + service_definition_parser = providers.Singleton(ServiceDefinitionParser) + + api_service_manager = providers.Singleton( + ApiServicesManager, + api_services_repository=api_services_repository, + cloud_storage_manager=cloud_storage_manager, + cache_manager=cache_manager, + config=config, + ) + + api_change_publisher = providers.Singleton( + ApiChangePublisher, + cache_manager=cache_manager, + ) + + service_registry = providers.Singleton( + ServiceRegistry, api_service_manager=api_service_manager + ) + + # Initialize service registry with loaded configurations + initialized_service_registry = providers.Singleton( + lambda service_registry: _initialize_service_registry(service_registry), + service_registry=service_registry, + ) + + auth_manager = providers.Singleton(AuthManager) + + pipeline_builder = providers.Singleton(PipelineBuilder) + + pipeline_cache = providers.Singleton(PipelineCache) + + # Main API proxy (using initialized service registry) + api_proxy = providers.Singleton( + ApiProxy, + service_registry=initialized_service_registry, + api_services_manager=api_service_manager, + api_change_publisher=api_change_publisher, + ) + + # Router (using initialized service registry) + proxy_router = providers.Singleton( + ProxyRouter, + proxy=api_proxy, + service_registry=initialized_service_registry, + api_services_manager=api_service_manager, + response_formatter=response_formatter, + ) + + api_change_processor = providers.Singleton( + ApiChangeProcessor, + proxy_router=proxy_router, + ) + + # Router factory method + router = providers.Callable( + lambda proxy_router: proxy_router.get_router(), proxy_router=proxy_router + ) + + initialized_proxy = providers.Singleton( + lambda api_proxy, service_registry: api_proxy, + api_proxy=api_proxy, + service_registry=initialized_service_registry, + ) + + +def create_api_services_container( + api_service_repository, + cloud_storage_manager, + response_formatter, + db_client=None, + cache_manager=None, +) -> ApiServicesContainer: + """ + Factory function to create and configure API services container. 
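+
+    Call this once at application startup; the container wires the
+    api_services_module.execution package so that @inject-decorated helpers
+    such as execute_api_service can resolve the proxy automatically.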
+ + Args: + api_service_repository: Repository for api service metadata + cloud_storage_manager: Cloud storage manager for service definitions + db_client: Database client (optional, for future use) + cache_manager: Cache manager (optional, for future use) + + Returns: + Configured ApiServicesContainer + """ + container = ApiServicesContainer( + api_services_repository=api_service_repository, + cloud_storage_manager=cloud_storage_manager, + response_formatter=response_formatter, + ) + + # Wire external dependencies if provided + if db_client: + container.db_client.override(db_client) + if cache_manager: + container.cache_manager.override(cache_manager) + + container.wire( + modules=[__name__], + packages=[ + 'api_services_module.execution', + ], + ) + + return container diff --git a/wavefront/server/modules/api_services_module/api_services_module/auth/__init__.py b/wavefront/server/modules/api_services_module/api_services_module/auth/__init__.py new file mode 100644 index 00000000..8812c0b2 --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/auth/__init__.py @@ -0,0 +1 @@ +"""Authentication package for API services module.""" diff --git a/wavefront/server/modules/api_services_module/api_services_module/auth/handlers.py b/wavefront/server/modules/api_services_module/api_services_module/auth/handlers.py new file mode 100644 index 00000000..edc74d64 --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/auth/handlers.py @@ -0,0 +1,120 @@ +"""Authentication handlers for different auth types.""" + +import base64 +from typing import Dict +from abc import abstractmethod + +from ..models.pipeline import ( + PipelineStage, + PipelineContext, + StageType, + PipelineException, +) +from ..models.service import AuthType, AuthConfig + + +class AuthHandler(PipelineStage): + """Base class for authentication handlers.""" + + def __init__(self, auth_config: AuthConfig): + self.auth_config = auth_config + + def get_stage_type(self) -> StageType: + """Return authenticator stage type.""" + return StageType.AUTHENTICATOR + + def get_name(self) -> str: + """Return the authenticator name.""" + return f'{self.auth_config.type.value}_authenticator' + + @abstractmethod + def generate_auth_headers(self, context: PipelineContext) -> Dict[str, str]: + """Generate authentication headers based on auth type.""" + pass + + async def execute(self, context: PipelineContext) -> PipelineContext: + """Execute authentication stage.""" + context.add_trace( + self.get_name(), f'Starting {self.auth_config.type.value} authentication' + ) + + try: + # Generate auth-specific headers + auth_headers = self.generate_auth_headers(context) + + # Add additional headers from config + auth_headers.update(self.auth_config.additional_headers) + + # Store auth headers in context + context.auth_headers.update(auth_headers) + context.merge_backend_headers(auth_headers) + + context.add_trace( + self.get_name(), 'Authentication headers generated successfully' + ) + return context + + except Exception as e: + context.add_trace(self.get_name(), f'Authentication failed: {str(e)}') + raise PipelineException( + f'Authentication failed: {str(e)}', self.get_name(), context + ) + + +class BearerAuthHandler(AuthHandler): + """Bearer token authentication handler.""" + + def __init__(self, auth_config: AuthConfig): + super().__init__(auth_config) + if not auth_config.token: + raise ValueError('Bearer auth requires a token') + + def generate_auth_headers(self, context: PipelineContext) -> Dict[str, 
str]:
+        """Generate Bearer token headers."""
+        return {'Authorization': f'Bearer {self.auth_config.token}'}
+
+
+class BasicAuthHandler(AuthHandler):
+    """Basic authentication handler."""
+
+    def __init__(self, auth_config: AuthConfig):
+        super().__init__(auth_config)
+        if not auth_config.username or not auth_config.password:
+            raise ValueError('Basic auth requires username and password')
+
+    def generate_auth_headers(self, context: PipelineContext) -> Dict[str, str]:
+        """Generate Basic auth headers."""
+        credentials = f'{self.auth_config.username}:{self.auth_config.password}'
+        encoded_credentials = base64.b64encode(credentials.encode()).decode()
+
+        return {'Authorization': f'Basic {encoded_credentials}'}
+
+
+class ApiKeyAuthHandler(AuthHandler):
+    """API Key authentication handler."""
+
+    def __init__(self, auth_config: AuthConfig):
+        super().__init__(auth_config)
+        if not auth_config.api_key:
+            raise ValueError('API Key auth requires an api_key')
+
+    def generate_auth_headers(self, context: PipelineContext) -> Dict[str, str]:
+        """Generate API Key headers."""
+        return {self.auth_config.api_key_header: self.auth_config.api_key}
+
+
+class AuthHandlerFactory:
+    """Factory for creating authentication handlers."""
+
+    @staticmethod
+    def create_handler(auth_config: AuthConfig) -> AuthHandler:
+        """Create appropriate auth handler based on auth type."""
+
+        if auth_config.type == AuthType.BEARER:
+            return BearerAuthHandler(auth_config)
+        elif auth_config.type == AuthType.BASIC:
+            return BasicAuthHandler(auth_config)
+        elif auth_config.type == AuthType.API_KEY:
+            return ApiKeyAuthHandler(auth_config)
+        else:
+            raise ValueError(f'Unsupported auth type: {auth_config.type}')
diff --git a/wavefront/server/modules/api_services_module/api_services_module/auth/manager.py b/wavefront/server/modules/api_services_module/api_services_module/auth/manager.py
new file mode 100644
index 00000000..01322bcb
--- /dev/null
+++ b/wavefront/server/modules/api_services_module/api_services_module/auth/manager.py
@@ -0,0 +1,62 @@
+"""Authentication manager for orchestrating auth handlers."""
+
+from typing import Dict, Optional
+from ..models.service import ServiceDefinition
+from ..models.pipeline import PipelineContext
+from .handlers import AuthHandler, AuthHandlerFactory
+
+
+class AuthManager:
+    """
+    Central authentication manager that orchestrates different auth handlers.
+
+    Manages authentication for multiple services and caches auth handlers
+    for performance.
+    """
+
+    def __init__(self):
+        self._auth_handlers: Dict[str, AuthHandler] = {}
+
+    def register_service_auth(self, service_definition: ServiceDefinition):
+        """Register authentication handler for a service."""
+        auth_key = f'{service_definition.id}:{service_definition.auth.version}'
+
+        if auth_key not in self._auth_handlers:
+            handler = AuthHandlerFactory.create_handler(service_definition.auth)
+            self._auth_handlers[auth_key] = handler
+
+    def get_auth_handler(
+        self, service_id: str, auth_version: str = 'v1'
+    ) -> Optional[AuthHandler]:
+        """Get authentication handler for a service."""
+        auth_key = f'{service_id}:{auth_version}'
+        return self._auth_handlers.get(auth_key)
+
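+    # Note: authenticate() is async because the underlying handler stages
+    # implement the pipeline protocol with an async execute().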
+    async def authenticate(self, context: PipelineContext) -> PipelineContext:
+        """
+        Authenticate a request using the appropriate handler.
+
+        Args:
+            context: Pipeline context containing service information
+
+        Returns:
+            Modified context with authentication headers
+
+        Raises:
+            ValueError: If no auth handler is registered for the service
+            PipelineException: If authentication fails
+        """
+        auth_handler = self.get_auth_handler(context.service_id)
+
+        if not auth_handler:
+            raise ValueError(f'No auth handler found for service: {context.service_id}')
+
+        return await auth_handler.execute(context)
+
+    def clear_cache(self):
+        """Clear all cached auth handlers."""
+        self._auth_handlers.clear()
+
+    def remove_service_auth(self, service_id: str, auth_version: str = 'v1'):
+        """Remove auth handler for a specific service."""
+        auth_key = f'{service_id}:{auth_version}'
+        self._auth_handlers.pop(auth_key, None)
diff --git a/wavefront/server/modules/api_services_module/api_services_module/config/__init__.py b/wavefront/server/modules/api_services_module/api_services_module/config/__init__.py
new file mode 100644
index 00000000..78abe0e0
--- /dev/null
+++ b/wavefront/server/modules/api_services_module/api_services_module/config/__init__.py
@@ -0,0 +1 @@
+"""Configuration package for API services module."""
diff --git a/wavefront/server/modules/api_services_module/api_services_module/config/parser.py b/wavefront/server/modules/api_services_module/api_services_module/config/parser.py
new file mode 100644
index 00000000..a86f4660
--- /dev/null
+++ b/wavefront/server/modules/api_services_module/api_services_module/config/parser.py
@@ -0,0 +1,154 @@
+"""YAML service definition parser."""
+
+import yaml
+from typing import Dict, Any, List
+from ..models.service import (
+    ServiceDefinition,
+    AuthConfig,
+    ApiConfig,
+    AuthType,
+    HttpMethod,
+)
+
+
+class ServiceDefinitionParser:
+    """Parser for YAML service definition files."""
+
+    @staticmethod
+    def parse_yaml_string(yaml_content: str) -> ServiceDefinition:
+        """
+        Parse a YAML service definition from string.
+
+        Args:
+            yaml_content: YAML content as string
+
+        Returns:
+            ServiceDefinition object
+        """
+        try:
+            yaml_data = yaml.safe_load(yaml_content)
+        except yaml.YAMLError as e:
+            raise ValueError(f'Invalid YAML content: {str(e)}')
+
+        return ServiceDefinitionParser._parse_service_data(yaml_data)
+
+    @staticmethod
+    def _parse_service_data(yaml_data: Dict[str, Any]) -> ServiceDefinition:
+        """Parse service data from loaded YAML."""
+        if 'service' not in yaml_data:
+            raise ValueError("Missing 'service' root key in YAML")
+
+        service_data = yaml_data['service']
+
+        # Parse required fields
+        service_id = service_data.get('id')
+        base_url = service_data.get('base_url')
+
+        if not service_id:
+            raise ValueError('Missing required field: service.id')
+        if not base_url:
+            raise ValueError('Missing required field: service.base_url')
+
+        # Parse authentication config
+        auth_config = ServiceDefinitionParser._parse_auth_config(
+            service_data.get('auth', {})
+        )
+
+        # Parse API configs
+        api_configs = ServiceDefinitionParser._parse_api_configs(
+            service_data.get('apis', [])
+        )
+
+        return ServiceDefinition(
+            id=service_id, base_url=base_url, auth=auth_config, apis=api_configs
+        )
+
+    @staticmethod
+    def _parse_auth_config(auth_data: Dict[str, Any]) -> AuthConfig:
+        """Parse authentication configuration."""
+        auth_id = auth_data.get('id', 'default-auth')
+        auth_type_str = auth_data.get('type', '').lower()
+
+        # Validate auth type
+        try:
+            auth_type = AuthType(auth_type_str)
+        except ValueError:
+            raise ValueError(
+                f'Invalid auth type: {auth_type_str}.
Must be one of: {[t.value for t in AuthType]}' + ) + + auth_config = AuthConfig( + id=auth_id, + type=auth_type, + version=auth_data.get('version', 'v1'), + base_url=auth_data.get('base_url'), + path=auth_data.get('path', ''), + additional_headers=auth_data.get('additional_headers', {}), + ) + + # Set auth-specific fields based on type + if auth_type == AuthType.BEARER: + auth_config.token = auth_data.get('token') + if not auth_config.token: + raise ValueError("Bearer auth requires 'token' field") + + elif auth_type == AuthType.BASIC: + auth_config.username = auth_data.get('username') + auth_config.password = auth_data.get('password') + if not auth_config.username or not auth_config.password: + raise ValueError("Basic auth requires 'username' and 'password' fields") + + elif auth_type == AuthType.API_KEY: + auth_config.api_key = auth_data.get('api_key') + auth_config.api_key_header = auth_data.get('api_key_header', 'X-API-Key') + if not auth_config.api_key: + raise ValueError("API Key auth requires 'api_key' field") + + return auth_config + + @staticmethod + def _parse_api_configs(apis_data: List[Dict[str, Any]]) -> List[ApiConfig]: + """Parse API configurations.""" + api_configs = [] + + for api_data in apis_data: + api_id = api_data.get('id') + # Exposed path (required) + path = api_data.get('path') + # Backend path (required) + backend_path = api_data.get('backend_path') + method_str = api_data.get('method', 'GET').upper() + + if not api_id: + raise ValueError('API missing required field: id') + if not path: + raise ValueError( + f"API '{api_id}' missing required field: path (exposed)" + ) + if not backend_path: + raise ValueError(f"API '{api_id}' missing required field: backend_path") + + # Validate HTTP method + try: + method = HttpMethod(method_str) + except ValueError: + raise ValueError( + f'Invalid HTTP method: {method_str}. Must be one of: {[m.value for m in HttpMethod]}' + ) + + api_config = ApiConfig( + id=api_id, + path=path, + backend_path=backend_path, + method=method, + version=api_data.get('version', 'v1'), + additional_headers=api_data.get('additional_headers', {}), + output_mapper_enabled=api_data.get('output_mapper', {}).get( + 'enabled', False + ), + output_mapper=api_data.get('output_mapper', {}).get('mapper', {}), + ) + + api_configs.append(api_config) + + return api_configs diff --git a/wavefront/server/modules/api_services_module/api_services_module/config/registry.py b/wavefront/server/modules/api_services_module/api_services_module/config/registry.py new file mode 100644 index 00000000..f5282fb4 --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/config/registry.py @@ -0,0 +1,170 @@ +"""Service registry for managing service definitions.""" + +from typing import Dict, List, Optional +from ..models.service import ServiceDefinition +from .parser import ServiceDefinitionParser +from common_module.log.logger import logger +from ..core.manager import ApiServicesManager + + +class ServiceRegistry: + """ + Registry for managing service definitions. + + Provides CRUD operations for service definitions and handles + loading from YAML files. + """ + + def __init__(self, api_service_manager: Optional[ApiServicesManager] = None): + """ + Initialize service registry. 
+
+        Args:
+            api_service_manager: Manager used to load service definitions from storage
+        """
+        self.api_service_manager = api_service_manager
+
+        self._services: Dict[str, ServiceDefinition] = {}
+        self.parser = ServiceDefinitionParser()
+
+    def register_service(self, service_definition: ServiceDefinition):
+        """
+        Register a service definition.
+
+        Args:
+            service_definition: Service definition to register
+        """
+        self._services[service_definition.id] = service_definition
+
+    def deregister_service(self, service_id: str):
+        """
+        Deregister a service definition from the runtime registry.
+
+        Args:
+            service_id: Identifier of the service to remove
+        """
+        self._services.pop(service_id, None)
+        logger.info(f'Service removed from runtime: {service_id}')
+
+    def get_service(self, service_id: str) -> Optional[ServiceDefinition]:
+        """
+        Get a service definition by ID.
+
+        Args:
+            service_id: Service identifier
+
+        Returns:
+            ServiceDefinition if found, None otherwise
+        """
+        return self._services.get(service_id)
+
+    def get_all_services(self) -> Dict[str, ServiceDefinition]:
+        """Get all registered service definitions."""
+        return self._services.copy()
+
+    def get_service_ids(self) -> List[str]:
+        """Get list of all registered service IDs."""
+        return list(self._services.keys())
+
+    def remove_service(self, service_id: str) -> bool:
+        """
+        Remove a service definition.
+
+        Args:
+            service_id: Service identifier
+
+        Returns:
+            True if service was removed, False if not found
+        """
+        if service_id in self._services:
+            del self._services[service_id]
+            return True
+        return False
+
+    def _ensure_manager(self):
+        if not self.api_service_manager:
+            raise RuntimeError('API services manager is not configured')
+
+    async def load_from_db(self):
+        self._ensure_manager()
+
+        services = await self.api_service_manager.get_all_api_services() or []
+        self.clear_all()
+
+        for service in services:
+            yaml_content = self.api_service_manager.fetch_service_def(service)
+            service_definition = self.parser.parse_yaml_string(yaml_content)
+            self.register_service(service_definition)
+
+        logger.info(f'Loaded {len(self._services)} service definitions from db')
+
+    async def load_service_from_db(self, service_id: str):
+        self._ensure_manager()
+
+        service = await self.api_service_manager.get_api_service(id=service_id)
+        if not service:
+            raise ValueError(f'No service definition found for service: {service_id}')
+        service_def_yaml = self.api_service_manager.fetch_service_def(service)
+        service_def = self.parser.parse_yaml_string(service_def_yaml)
+        self.register_service(service_definition=service_def)
+
+        logger.info(f'Loaded service from db: {service_id}')
+
+    async def reload_service(self, service_id: str):
+        """
+        Reload a specific service definition from the database.
+
+        Args:
+            service_id: Service to reload
+        """
+        self._ensure_manager()
+        await self.load_service_from_db(service_id)
+
+    def validate_service(self, service_id: str) -> bool:
+        """
+        Validate that a service definition is complete and valid.
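+
+        A definition is considered valid when it has an id, a base_url, an
+        auth block with a type, and at least one API with id, path and method.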
+ + Args: + service_id: Service to validate + + Returns: + True if valid, False otherwise + """ + service = self.get_service(service_id) + if not service: + return False + + # Basic validation + if not service.id or not service.base_url: + return False + + if not service.auth or not service.auth.type: + return False + + # Validate at least one API is defined + if not service.apis: + return False + + # Validate each API + for api in service.apis: + if not api.id or not api.path or not api.method: + return False + + return True + + def clear_all(self): + """Clear all registered services.""" + self._services.clear() + + def get_stats(self) -> Dict[str, int]: + """Get registry statistics.""" + total_apis = sum(len(service.apis) for service in self._services.values()) + + return { + 'total_services': len(self._services), + 'total_apis': total_apis, + 'auth_types': len( + set(service.auth.type for service in self._services.values()) + ), + } diff --git a/wavefront/server/modules/api_services_module/api_services_module/core/__init__.py b/wavefront/server/modules/api_services_module/api_services_module/core/__init__.py new file mode 100644 index 00000000..07e8a3bd --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/core/__init__.py @@ -0,0 +1 @@ +"""Core package for API services module.""" diff --git a/wavefront/server/modules/api_services_module/api_services_module/core/manager.py b/wavefront/server/modules/api_services_module/api_services_module/core/manager.py new file mode 100644 index 00000000..760b353b --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/core/manager.py @@ -0,0 +1,101 @@ +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from db_repo_module.models.api_services import ApiServices +from flo_cloud.cloud_storage import CloudStorageManager +from typing import List +from api_services_module.env import SERVICE_DEFINITION_BUCKET + + +class ApiServicesManager: + """Manager for API services.""" + + def __init__( + self, + api_services_repository: SQLAlchemyRepository[ApiServices], + cloud_storage_manager: CloudStorageManager, + cache_manager: CacheManager, + config: dict, + ): + """Initialize the API services manager.""" + self.config = config + self.cache_manager = cache_manager + self.api_services_repository = api_services_repository + self.cloud_storage_manager = cloud_storage_manager + + async def create_api_service( + self, id: str, service_def_yaml: str + ) -> ApiServices | None: + """Create a new API service.""" + service_def_path = f'api_services/{id}.yaml' + self.cloud_storage_manager.save_small_file( + file_content=service_def_yaml.encode('utf-8'), + bucket_name=self._service_storage_bucket(), + key=service_def_path, + content_type='application/yaml', + ) + self.cache_manager.add(service_def_path, service_def_yaml) + return await self.api_services_repository.create( + id=id, service_def_path=service_def_path + ) + + def fetch_service_def(self, api_services: ApiServices) -> str: + service_def_path = f'api_services/{api_services.id}.yaml' + cache_entry = self.cache_manager.get_str(service_def_path) + if cache_entry: + return cache_entry + yaml_bytes: bytes = self.cloud_storage_manager.read_file( + bucket_name=self._service_storage_bucket(), file_path=service_def_path + ) + yaml_content = yaml_bytes.decode('utf-8') + return yaml_content + + async def get_api_service(self, id: str) -> ApiServices | None: + """Get an API 
service by id.""" + return await self.api_services_repository.find_one(id=id) + + async def get_all_api_services(self) -> List[ApiServices] | None: + """Get all API services.""" + return await self.api_services_repository.find() + + async def update_api_service(self, id: str, service_def_yaml: str) -> bool: + """Update an API service.""" + service_def_path = f'api_services/{id}.yaml' + self.cloud_storage_manager.save_small_file( + file_content=service_def_yaml.encode('utf-8'), + bucket_name=self._service_storage_bucket(), + key=service_def_path, + content_type='application/yaml', + ) + await self.api_services_repository.find_one_and_update( + filters={'id': id}, update_data={'service_def_path': service_def_path} + ) + self.cache_manager.add(service_def_path, service_def_yaml) + return True + + async def delete_api_service(self, id: str) -> bool: + """Delete an API service.""" + service_def_path = f'api_services/{id}.yaml' + await self.api_services_repository.delete_all(filters={'id': id}) + self.cloud_storage_manager.delete_file( + bucket_name=self._service_storage_bucket(), file_path=service_def_path + ) + return True + + async def deactivate_api_service(self, id: str) -> bool: + """Deactivate an API service.""" + return await self.api_services_repository.find_one_and_update( + filters={'id': id}, update_data={'is_active': False} + ) + + async def activate_api_service(self, id: str) -> bool: + """Activate an API service.""" + return await self.api_services_repository.find_one_and_update( + filters={'id': id}, update_data={'is_active': True} + ) + + def _service_storage_bucket(self) -> str: + if not SERVICE_DEFINITION_BUCKET: + raise ValueError( + 'SERVICE_DEFINITION_BUCKET is not set in the environment variables' + ) + return SERVICE_DEFINITION_BUCKET diff --git a/wavefront/server/modules/api_services_module/api_services_module/core/proxy.py b/wavefront/server/modules/api_services_module/api_services_module/core/proxy.py new file mode 100644 index 00000000..d91b330f --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/core/proxy.py @@ -0,0 +1,385 @@ +"""Core proxy functionality for handling API requests.""" + +from typing import Any, Dict, Optional, Union +from fastapi import HTTPException +from fastapi.responses import Response +from common_module.log.logger import logger +from ..models.pipeline import PipelineContext, PipelineException +from ..models.service import ProxyResponse, ServiceDefinition, ApiConfig +from ..config.registry import ServiceRegistry +from ..pipeline.builder import PipelineBuilder, PipelineCache +from ..auth.manager import AuthManager +from .manager import ApiServicesManager +from api_services_module.config.parser import ServiceDefinitionParser +from api_services_module.utils.api_change_publisher import ( + ApiChangePublisher, + UpdateMessage, +) + + +class ApiProxy: + """ + Core API proxy that orchestrates the entire request processing pipeline. + + Handles service routing, authentication, and request/response processing + through the pipeline architecture. 
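+
+    Pipelines are built lazily per (service, api, version) and cached; see
+    _get_or_build_pipeline below.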
+ """ + + def __init__( + self, + service_registry: ServiceRegistry, + api_services_manager: Optional[ApiServicesManager] = None, + api_change_publisher: Optional[ApiChangePublisher] = None, + ): + self.service_registry = service_registry + self.api_services_manager = api_services_manager + self.api_change_publisher = api_change_publisher + + self.auth_manager = AuthManager() + self.pipeline_cache = PipelineCache() + self.pipeline_builder = PipelineBuilder() + + # Initialize auth manager with all registered services + self._initialize_auth_manager() + + def _require_api_services_manager(self): + if not self.api_services_manager: + raise HTTPException( + status_code=500, + detail='API services manager is not configured for this proxy', + ) + + def _initialize_auth_manager(self): + """Initialize auth manager with all registered services.""" + for service in self.service_registry.get_all_services().values(): + self.auth_manager.register_service_auth(service) + + async def process_request( + self, + service_id: str, + api_id: str, + api_version: str = 'v1', + method: str = 'POST', + path: str = '', + path_params: Optional[Dict[str, str]] = None, + query_params: Optional[Dict[str, Any]] = None, + headers: Optional[Dict[str, str]] = None, + body: Optional[Any] = None, + ) -> Union[ProxyResponse, Response]: + """ + Process an API request through the proxy pipeline. + + Args: + service_id: Target service identifier + api_id: Target API identifier + api_version: API version (default: v1) + method: HTTP method + path: Request path + path_params: Path parameters extracted from URL + query_params: Query parameters + headers: Request headers + body: Request body + + Returns: + Union[ProxyResponse, Response] with standardized format (JSON/text via ProxyResponse, binary via fastapi.Response) + + Raises: + HTTPException: For various error conditions + """ + # Create pipeline context + context = PipelineContext( + service_id=service_id, + api_id=api_id, + api_version=api_version, + method=method, + path=path, + path_params=path_params or {}, + query_params=query_params or {}, + headers=headers or {}, + body=body, + ) + + context.add_trace( + 'proxy', f'Starting request processing for {service_id}/{api_id}' + ) + + try: + # Get service definition + service_definition = self.service_registry.get_service(service_id) + if not service_definition: + raise HTTPException( + status_code=404, detail=f'Service not found: {service_id}' + ) + + # Get API configuration + api_config = service_definition.get_api_by_id(api_id, api_version) + if not api_config: + raise HTTPException( + status_code=404, + detail=f'API not found: {api_id} (version: {api_version}) in service: {service_id}', + ) + + context.add_trace('proxy', 'Found service and API configuration') + + # Get or build pipeline + pipeline = self._get_or_build_pipeline(service_definition, api_config) + + # Execute pipeline + context = await pipeline.execute(context) + + # Check if response is binary content + if context.is_binary_response and context.raw_response_content is not None: + # Return binary response directly with original headers. Remove content-type from headers to avoid duplication. 
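+                # Note: binary payloads skip the ProxyResponse JSON envelope
+                # entirely; the backend's bytes and headers are forwarded, with
+                # content-type promoted to media_type so the header is not
+                # emitted twice.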
+ context.add_trace('proxy', 'Returning binary response directly') + # Copy headers and pop content-type for media_type + _headers = dict(context.response_headers) + _media_type = _headers.pop('content-type', None) + return Response( + content=context.raw_response_content, + status_code=context.response_status, + headers=_headers, + media_type=_media_type, + ) + + # Create successful response for JSON/text content + response = ProxyResponse.success( + data=context.response_body, + trace=context.execution_trace, + message='Request processed successfully', + http_status_code=context.response_status, + ) + + context.add_trace('proxy', 'Request processing completed successfully') + return response + + except HTTPException as e: + logger.error(f'HTTPException: {str(e)}', exc_info=True) + raise + + except PipelineException as e: + logger.error(f'PipelineException: {str(e)}', exc_info=True) + context.add_trace('proxy', f'Pipeline error: {str(e)}') + return ProxyResponse.error( + message=f'Pipeline error: {e.message}', + trace=context.execution_trace, + status='api_pipeline_error', + http_status_code=502, # Bad Gateway for pipeline errors + ) + + except Exception as e: + logger.error(f'Exception: {str(e)}', exc_info=True) + context.add_trace('proxy', f'Unexpected error: {str(e)}') + return ProxyResponse.error( + message='Internal error', + trace=context.execution_trace, + status='internal_error', + http_status_code=500, + ) + + def _get_or_build_pipeline( + self, service_definition: ServiceDefinition, api_config: ApiConfig + ): + """Get cached pipeline or build new one.""" + # Try to get from cache first + pipeline = self.pipeline_cache.get_pipeline( + service_definition.id, api_config.id, api_config.version + ) + + if pipeline is None: + # Build new pipeline + pipeline = self.pipeline_builder.build_service_pipeline( + service_definition, api_config + ) + + # Cache the pipeline + self.pipeline_cache.cache_pipeline( + service_definition.id, api_config.id, pipeline, api_config.version + ) + + return pipeline + + async def reload_service(self, service_id: str): + """ + Reload a service configuration. + + Args: + service_id: Service to reload + """ + try: + # Reload service definition + await self.service_registry.reload_service(service_id) + + # Re-register auth + service_definition = self.service_registry.get_service(service_id) + if service_definition: + self.auth_manager.register_service_auth(service_definition) + + # Invalidate cached pipelines + self.pipeline_cache.invalidate_service(service_id) + + except Exception as e: + logger.error(f'Failed with error: {str(e)}', exc_info=True) + raise HTTPException( + status_code=500, + detail=f'Failed to reload service {service_id}', + ) + + def remove_service(self, service_id: str): + """ + Remove a service from in-memory state. 
+ + This cleans up: + - Service registry + - Auth manager + - Pipeline cache + + Args: + service_id: Service to remove + """ + logger.info(f'Removing service from in-memory state: {service_id}') + + # Remove from registry + self.service_registry.deregister_service(service_id) + logger.info(f'Removed service from registry: {service_id}') + + # Remove auth handler + self.auth_manager.remove_service_auth(service_id) + logger.info(f'Removed auth handler for service: {service_id}') + + # Invalidate cached pipelines + self.pipeline_cache.invalidate_service(service_id) + logger.info(f'Invalidated pipelines for service: {service_id}') + + def get_service_info(self, service_id: str) -> Dict[str, Any]: + """ + Get information about a service. + + Args: + service_id: Service identifier + + Returns: + Service information dictionary + """ + service_definition = self.service_registry.get_service(service_id) + if not service_definition: + raise HTTPException( + status_code=404, detail=f'Service not found: {service_id}' + ) + + return { + 'service_id': service_definition.id, + 'base_url': service_definition.base_url, + 'auth': service_definition.auth, + 'apis': [ + { + 'id': api.id, + 'version': api.version, + 'path': api.path, + 'method': api.method.value, + 'backend_path': api.backend_path, + 'additional_headers': api.additional_headers, + 'backend_query_params': api.backend_query_params, + 'output_mapper_enabled': api.output_mapper_enabled, + 'output_mapper': api.output_mapper, + } + for api in service_definition.apis + ], + } + + def get_all_services_info(self) -> Dict[str, Any]: + """Get information about all registered services.""" + services = [] + for service_id in self.service_registry.get_service_ids(): + try: + services.append(self.get_service_info(service_id)) + except HTTPException as e: + logger.error(f'Service Exception: {str(e)}', exc_info=True) + # Skip services that can't be loaded + continue + + return { + 'services': services, + 'stats': self.service_registry.get_stats(), + 'cache_stats': self.pipeline_cache.get_stats(), + } + + def health_check(self) -> Dict[str, Any]: + """Perform health check of the proxy.""" + try: + stats = self.service_registry.get_stats() + cache_stats = self.pipeline_cache.get_stats() + + return { + 'status': 'healthy', + 'services_count': stats['total_services'], + 'apis_count': stats['total_apis'], + 'cached_pipelines': cache_stats['cached_pipelines'], + 'auth_types_supported': ['bearer', 'basic', 'api_key'], + } + except Exception as e: + return {'status': 'unhealthy', 'error': str(e)} + + async def create_api_services(self, service_yaml: str): + self._require_api_services_manager() + service_def: ServiceDefinition = ServiceDefinitionParser.parse_yaml_string( + service_yaml + ) + service_id = service_def.id + service = await self.api_services_manager.get_api_service(id=service_id) + if service: + await self.reload_service(service_id) + raise HTTPException( + status_code=400, + detail=f'Service already exists: {service_id}, reloaded again', + ) + await self.api_services_manager.create_api_service( + id=service_id, service_def_yaml=service_yaml + ) + await self.reload_service(service_id) + self.api_change_publisher.publish_message( + UpdateMessage( + service_id=service_id, + operation='create', + metadata={}, + ) + ) + + async def update_api_services(self, id: str, service_yaml: str): + self._require_api_services_manager() + service_def: ServiceDefinition = ServiceDefinitionParser.parse_yaml_string( + service_yaml + ) + service_id = service_def.id + if id != 
service_id:
+            raise HTTPException(
+                status_code=400, detail=f"Service ids don't match: {service_id} vs {id}"
+            )
+        service = await self.api_services_manager.get_api_service(id=service_id)
+        if not service:
+            raise HTTPException(
+                status_code=404, detail=f'Service not found: {service_id}'
+            )
+        await self.api_services_manager.update_api_service(
+            id=service_id, service_def_yaml=service_yaml
+        )
+        await self.reload_service(service_id)
+        self.api_change_publisher.publish_message(
+            UpdateMessage(
+                service_id=service_id,
+                operation='update',
+                metadata={},
+            )
+        )
+
+    async def delete_api_services(self, id: str):
+        self._require_api_services_manager()
+        await self.api_services_manager.delete_api_service(id)
+        # Clean up in-memory state
+        self.remove_service(id)
+        self.api_change_publisher.publish_message(
+            UpdateMessage(
+                service_id=id,
+                operation='delete',
+                metadata={},
+            )
+        )
diff --git a/wavefront/server/modules/api_services_module/api_services_module/core/router.py b/wavefront/server/modules/api_services_module/api_services_module/core/router.py
new file mode 100644
index 00000000..bf4d4bab
--- /dev/null
+++ b/wavefront/server/modules/api_services_module/api_services_module/core/router.py
@@ -0,0 +1,478 @@
+"""FastAPI router for handling proxy requests."""
+
+from fastapi import APIRouter, Request, HTTPException, Response
+import json
+from typing import Dict, List, Optional
+
+from .proxy import ApiProxy
+from .manager import ApiServicesManager
+from ..config.registry import ServiceRegistry
+from ..config.parser import ServiceDefinitionParser
+from fastapi.responses import JSONResponse
+from fastapi import status
+
+from common_module.response_formatter import ResponseFormatter
+from common_module.log.logger import logger
+
+
+class ProxyRouter:
+    """FastAPI router for handling API proxy requests with dynamic path support."""
+
+    def __init__(
+        self,
+        proxy: ApiProxy,
+        service_registry: ServiceRegistry,
+        response_formatter: ResponseFormatter,
+        api_services_manager: Optional[ApiServicesManager] = None,
+    ):
+        self.proxy = proxy
+        self.service_registry = service_registry
+        self.api_services_manager = api_services_manager
+        self.response_formatter = response_formatter
+
+        self.router = APIRouter()
+        self.app: Optional[object] = None  # FastAPI app instance
+        self.app_prefix: str = '/floware'  # Prefix used when including router in app
+        self._setup_routes()
+
+    def set_app(self, app, prefix: str = '/floware'):
+        """Set the FastAPI app instance and prefix for dynamic route registration."""
+        self.app = app
+        self.app_prefix = prefix
+
+    def _setup_routes(self):
+        """Setup all proxy routes including dynamic routes based on service definitions."""
+        # Setup static management routes first
+        self._setup_management_routes()
+
+        # Setup dynamic API routes based on service definitions
+        # Only set up if services are available (skip if registry is empty)
+        if self.service_registry.get_all_services():
+            self._setup_dynamic_api_routes()
+        else:
+            logger.info(
+                'Service registry is empty, skipping initial dynamic route setup. Routes will be loaded after services are loaded from database.'
+ ) + + def _setup_management_routes(self): + """Setup service management and health check routes.""" + + # Service management endpoints + @self.router.get('/v1/api-services/{service_id}') + async def get_service_info(service_id: str): + """Get information about a specific service.""" + service = self.proxy.get_service_info(service_id) + return self.response_formatter.buildSuccessResponse(service) + + @self.router.get('/v1/api-services') + async def get_all_services(): + """Get information about all registered services.""" + services = self.proxy.get_all_services_info() + return self.response_formatter.buildSuccessResponse(services) + + @self.router.post('/v1/api-services/{service_id}/reload') + async def reload_service(service_id: str): + """Reload a service configuration.""" + await self.proxy.reload_service(service_id) + + # Ensure dynamic routes match the reloaded service definition + self.reload_service_routes(service_id) + return {'message': f'Service {service_id} reloaded successfully'} + + # Authentication endpoint (for direct auth API calls) + @self.router.post( + '/v1/api-services/{service_id}/authenticators/{auth_version}/{auth_id}' + ) + async def authenticate_direct( + service_id: str, auth_version: str, auth_id: str, request: Request + ): + """ + Direct authentication endpoint. + + Allows clients to call authentication APIs directly if needed. + """ + # This would be implemented if direct auth calls are needed + # For now, return not implemented + raise HTTPException( + status_code=501, + detail='Direct authentication calls not implemented in Phase 1', + ) + + @self.router.post('/v1/api-services') + async def create_api_services(request: Request): + yaml_content = (await request.body()).decode('utf-8') + # Parse to get service_id before creating + service_def = ServiceDefinitionParser.parse_yaml_string(yaml_content) + service_id = service_def.id + + await self.proxy.create_api_services(yaml_content) + # Reload routes for the newly created service + self.reload_service_routes(service_id) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=self.response_formatter.buildSuccessResponse( + {'message': 'API Service created'} + ), + ) + + @self.router.put('/v1/api-services/{id}') + async def update_api_services(request: Request, id: str): + yaml_content = (await request.body()).decode('utf-8') + await self.proxy.update_api_services(id, yaml_content) + # Reload routes for the updated service + self.reload_service_routes(id) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=self.response_formatter.buildSuccessResponse( + {'message': 'API Service updated'} + ), + ) + + @self.router.delete('/v1/api-services/{id}') + async def delete_api_services(request: Request, id: str): + await self.proxy.delete_api_services(id) + # Remove routes for the deleted service + self.remove_service_routes(id) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=self.response_formatter.buildSuccessResponse( + {'message': 'API Service deleted'} + ), + ) + + def _setup_dynamic_api_routes(self): + """Setup dynamic API routes based on service definitions.""" + # Get all services and their APIs + route_configs = self._build_route_configurations() + logger.info( + f'Built {len(route_configs)} route configurations from service registry' + ) + + # Sort routes by specificity (more specific routes first) + sorted_routes = self._sort_routes_by_specificity(route_configs) + + # Register routes in order + registered_count = 0 + for route_config in sorted_routes: + 
self._register_dynamic_route(route_config) + registered_count += 1 + logger.info(f'Registered {registered_count} dynamic API routes') + + def _build_route_configurations(self) -> List[Dict]: + """Build route configurations from all service definitions.""" + route_configs = [] + + for service in self.service_registry.get_all_services().values(): + for api in service.apis: + if api.path: + base = f'/v1/api-services/{service.id}/apis/{api.version}' + exposed_path = self._convert_path_to_fastapi_pattern(api.path) + # Ensure proper path concatenation (handle leading/trailing slashes) + if exposed_path.startswith('/'): + route_pattern = base + exposed_path + else: + route_pattern = base + '/' + exposed_path + else: + raise ValueError(f'API {api.id} has no path') + + route_configs.append( + { + 'pattern': route_pattern, + 'service_id': service.id, + 'api_id': api.id, + 'api_version': api.version, + 'api_path': api.path, + 'api_method': api.method, + 'specificity_score': self._calculate_specificity_score( + route_pattern + ), + } + ) + + return route_configs + + def _convert_path_to_fastapi_pattern(self, api_path: str) -> str: + """ + Convert API path pattern to FastAPI route pattern. + + Example: /users/{id}/orders -> /users/{id}/orders + FastAPI will handle the path parameter extraction. + """ + return api_path + + def _calculate_specificity_score(self, route_pattern: str) -> int: + """ + Calculate specificity score for route ordering. + Higher score = more specific = should be registered first. + + Rules: + - Static segments get higher score than parameterized segments + - Longer paths get higher scores + - Paths with fewer parameters get higher scores + - More specific paths should come before less specific ones + """ + segments = route_pattern.split('/') + score = 0 + param_count = 0 + static_count = 0 + + for segment in segments: + if segment: # Skip empty segments + if '{' in segment and '}' in segment: + # Parameterized segment - lower value + score += 1 + param_count += 1 + else: + # Static segment - higher value + score += 10 + static_count += 1 + + # Add bonus for path length (more segments = more specific) + total_segments = len([s for s in segments if s]) + score += total_segments * 2 + + # Penalty for parameters (fewer parameters = more specific) + score -= param_count * 5 + + # Bonus for static segments after the base API path + # The base path is /v1/services/{service_id}/apis/{api_version}/{api_id} + # So we look at segments after index 5 (0-based) + api_segments = segments[6:] if len(segments) > 6 else [] + api_static_segments = sum( + 1 for seg in api_segments if seg and not ('{' in seg and '}' in seg) + ) + score += api_static_segments * 20 # High bonus for API-level static segments + + return score + + def _sort_routes_by_specificity(self, route_configs: List[Dict]) -> List[Dict]: + """Sort routes by specificity score (highest first).""" + return sorted(route_configs, key=lambda x: x['specificity_score'], reverse=True) + + def _register_dynamic_route(self, route_config: Dict): + """Register a dynamic route with FastAPI.""" + pattern = route_config['pattern'] + service_id = route_config['service_id'] + api_id = route_config['api_id'] + api_version = route_config['api_version'] + + # Create the route handler + async def dynamic_proxy_handler(request: Request, response: Response): + """Dynamic proxy handler for API requests.""" + logger.info( + f'Route handler called for pattern={pattern}, request_path={request.url.path}, method={request.method}' + ) + try: + # Extract path 
parameters from the request + path_params = self._extract_path_parameters(request.url.path, pattern) + logger.info( + f'Extracted path_params={path_params} for service={service_id}, api={api_id}' + ) + + # Extract request data + headers = dict(request.headers) + query_params = dict(request.query_params) + + # Get request body + body = None + if request.headers.get('content-type', '').startswith( + 'application/json' + ): + try: + body = await request.json() + except json.JSONDecodeError: + body = await request.body() + else: + body_bytes = await request.body() + if body_bytes: + body = body_bytes.decode('utf-8') + + # Process request through proxy + # Client always uses POST, but backend will use api_method from config + proxy_response = await self.proxy.process_request( + service_id=service_id, + api_id=api_id, + api_version=api_version, + method='POST', # Client always uses POST + path=request.url.path, + path_params=path_params, + query_params=query_params, + headers=headers, + body=body, + ) + + # Set response status code + response.status_code = proxy_response.http_status_code + + return proxy_response + + except HTTPException: + raise + except Exception as e: + logger.error(f'Failed to process request: {str(e)}', exc_info=True) + raise HTTPException(status_code=500, detail=str(e)) + + # Register the route with FastAPI + # Clients always use POST to the proxy, backend uses api_method from config + logger.info( + f'Registering route: {pattern} for service={service_id}, api={api_id}, method=POST' + ) + try: + # Add to sub-router (for initial setup) + self.router.add_api_route( + pattern, + dynamic_proxy_handler, + methods=['POST'], + name=f'proxy_{service_id}_{api_id}_{api_version}', + ) + + # Also add directly to app router if app is set (for dynamic route addition) + if self.app is not None: + full_pattern = self.app_prefix + pattern + self.app.router.add_api_route( + full_pattern, + dynamic_proxy_handler, + methods=['POST'], + name=f'proxy_{service_id}_{api_id}_{api_version}_app', + ) + logger.info(f'Also registered route in app router: {full_pattern}') + + logger.info(f'Successfully registered route: {pattern}') + except Exception as e: + logger.error(f'Failed to register route {pattern}: {str(e)}', exc_info=True) + raise + + def _extract_path_parameters( + self, request_path: str, route_pattern: str + ) -> Dict[str, str]: + """ + Extract path parameters from the request path. 
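+
+        Matching is positional: the route pattern and the request path are
+        split on '/' and zipped, and each {param} segment captures the
+        corresponding request segment.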
+ + Args: + request_path: The actual request path (includes /floware prefix) + route_pattern: The FastAPI route pattern (without /floware prefix) + + Returns: + Dictionary of parameter names and values + """ + path_params = {} + + # Remove the /floware prefix from request path to match route pattern + if request_path.startswith('/floware'): + adjusted_request_path = request_path[8:] # Remove '/floware' + else: + adjusted_request_path = request_path + + route_parts = route_pattern.split('/') + request_parts = adjusted_request_path.split('/') + + # Match each part of the route pattern with the request + for i, (route_part, request_part) in enumerate(zip(route_parts, request_parts)): + if route_part.startswith('{') and route_part.endswith('}'): + param_name = route_part[1:-1] # Remove { and } + path_params[param_name] = request_part + + return path_params + + def get_router(self) -> APIRouter: + """Get the configured FastAPI router.""" + return self.router + + def reload_routes(self): + """Reload dynamic routes after services are loaded into registry.""" + # Clear existing dynamic routes (keep management routes) + # We'll rebuild all routes to include newly loaded services + # Note: This doesn't remove routes, but adds new ones + # FastAPI will handle duplicate route registration + self._setup_dynamic_api_routes() + + def reload_service_routes(self, service_id: str): + """Reload routes for a specific service after it's been created/updated.""" + service = self.service_registry.get_service(service_id) + if not service: + logger.warning( + f'Service {service_id} not found in registry, skipping route registration' + ) + return + + logger.info(f'Reloading routes for service: {service_id}') + + # Remove old routes first to prevent duplicates when routes are updated + self.remove_service_routes(service_id) + + route_configs = [] + + for api in service.apis: + if api.path: + base = f'/v1/api-services/{service.id}/apis/{api.version}' + exposed_path = self._convert_path_to_fastapi_pattern(api.path) + if exposed_path.startswith('/'): + route_pattern = base + exposed_path + else: + route_pattern = base + '/' + exposed_path + else: + raise ValueError(f'API {api.id} has no path') + + route_configs.append( + { + 'pattern': route_pattern, + 'service_id': service.id, + 'api_id': api.id, + 'api_version': api.version, + 'api_path': api.path, + 'api_method': api.method, + 'specificity_score': self._calculate_specificity_score( + route_pattern + ), + } + ) + + # Sort and register routes + sorted_routes = self._sort_routes_by_specificity(route_configs) + for route_config in sorted_routes: + self._register_dynamic_route(route_config) + + logger.info(f'Registered {len(sorted_routes)} routes for service: {service_id}') + + def remove_service_routes(self, service_id: str): + """Remove all routes for a specific service after it's been deleted.""" + logger.info(f'Removing routes for service: {service_id}') + + # Pattern to match route names for this service + # Routes are named like: proxy_{service_id}_{api_id}_{api_version} + route_name_prefix = f'proxy_{service_id}_' + + # Remove from sub-router + original_count = len(self.router.routes) + self.router.routes = [ + route + for route in self.router.routes + if not ( + hasattr(route, 'name') + and route.name + and route.name.startswith(route_name_prefix) + ) + ] + sub_router_removed = original_count - len(self.router.routes) + logger.info( + f'Removed {sub_router_removed} routes from sub-router for service: {service_id}' + ) + + # Remove from app router if app is set + 
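+        # Dynamically added routes are registered on both self.router and the
+        # app router (see _register_dynamic_route), so both route lists must
+        # be pruned to fully detach the service.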
if self.app is not None: + original_app_count = len(self.app.router.routes) + self.app.router.routes = [ + route + for route in self.app.router.routes + if not ( + hasattr(route, 'name') + and route.name + and route.name.startswith(route_name_prefix) + ) + ] + app_removed = original_app_count - len(self.app.router.routes) + logger.info( + f'Removed {app_removed} routes from app router for service: {service_id}' + ) + + logger.info(f'Completed route removal for service: {service_id}') diff --git a/wavefront/server/modules/api_services_module/api_services_module/env.py b/wavefront/server/modules/api_services_module/api_services_module/env.py new file mode 100644 index 00000000..9204dd1c --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/env.py @@ -0,0 +1,3 @@ +import os + +SERVICE_DEFINITION_BUCKET = os.getenv('APPLICATION_BUCKET') diff --git a/wavefront/server/modules/api_services_module/api_services_module/execution/execute.py b/wavefront/server/modules/api_services_module/api_services_module/execution/execute.py new file mode 100644 index 00000000..428ee7e8 --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/execution/execute.py @@ -0,0 +1,81 @@ +"""Execute an API service by calling the backend through the proxy pipeline. + +This function provides a programmatic way to execute API services, handling +authentication, routing, and transformation through the existing pipeline infrastructure. + +Args: + api_service_id: ID of the API service to execute + api_id: ID of the specific API endpoint to call + payload: Request payload/body to send + path_params: Optional path parameters (e.g., {"id": "123"} for /users/{id}) + query_params: Optional query parameters + headers: Optional custom headers to include in the request + api_version: API version (default: "v1") + +Returns: + ProxyResponse object with meta (status, message, trace) and data + +Example: + >>> response = await execute_api_service( + ... api_service_id="crm-service", + ... api_id="get-user", + ... payload={"user_id": "123"}, + ... path_params={"id": "123"} + ... ) + >>> print(response.data) +""" + +from typing import Dict, Any, Optional +from api_services_module.core.proxy import ApiProxy +from api_services_module.models.service import ProxyResponse +from dependency_injector.wiring import inject, Provide +from api_services_module.api_services_container import ApiServicesContainer + + +@inject +async def execute_api_service( + api_service_id: str, + api_id: str, + payload: Optional[dict] = None, + path_params: Optional[Dict[str, str]] = None, + query_params: Optional[Dict[str, Any]] = None, + headers: Optional[Dict[str, str]] = None, + api_version: str = 'v1', + proxy: Optional[ApiProxy] = Provide[ApiServicesContainer.api_proxy], +) -> ProxyResponse: + """ + Execute an API service by calling the backend through the proxy pipeline. + + Args: + api_service_id: ID of the API service to execute + api_id: ID of the specific API endpoint to call + payload: Request payload/body to send + path_params: Optional path parameters (e.g., {"id": "123"} for /users/{id}) + query_params: Optional query parameters + headers: Optional custom headers to include in the request + api_version: API version (default: "v1") + proxy: Optional ApiProxy instance (if not provided, will need to be injected) + + Returns: + ProxyResponse object with meta (status, message, trace) and data + """ + if proxy is None: + raise ValueError( + 'ApiProxy instance must be provided. 
' + 'This function should be called with a proxy instance injected from the container.' + ) + + # Process the request through the proxy pipeline + response = await proxy.process_request( + service_id=api_service_id, + api_id=api_id, + api_version=api_version, + method='POST', # Client always uses POST + path='_workflow', # Path is determined by the API configuration + path_params=path_params or {}, + query_params=query_params or {}, + headers=headers or {}, + body=payload, + ) + + return response diff --git a/wavefront/server/modules/api_services_module/api_services_module/models/__init__.py b/wavefront/server/modules/api_services_module/api_services_module/models/__init__.py new file mode 100644 index 00000000..a30a4c35 --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/models/__init__.py @@ -0,0 +1 @@ +"""Models package for API services module.""" diff --git a/wavefront/server/modules/api_services_module/api_services_module/models/pipeline.py b/wavefront/server/modules/api_services_module/api_services_module/models/pipeline.py new file mode 100644 index 00000000..12cf7069 --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/models/pipeline.py @@ -0,0 +1,184 @@ +"""Pipeline models and protocols.""" + +from abc import ABC, abstractmethod +from enum import Enum +from typing import Any, Dict, Optional, List +from dataclasses import dataclass, field +import uuid +from datetime import datetime + + +class StageType(Enum): + """Pipeline stage types.""" + + AUTHENTICATOR = 'authenticator' + HEADER_INJECTOR = 'header_injector' + API_PROCESSOR = 'api_processor' + REQUEST_SENDER = 'request_sender' + RESPONSE_MAPPER = 'response_mapper' + COMPOSITE = 'composite' + + +@dataclass +class PipelineContext: + """Context object that flows through pipeline stages.""" + + # Request information + request_id: str = field(default_factory=lambda: str(uuid.uuid4())) + service_id: str = '' + api_id: str = '' + api_version: str = 'v1' + method: str = 'POST' + path: str = '' + path_params: Dict[str, str] = field( + default_factory=dict + ) # Path parameters extracted from URL + query_params: Dict[str, Any] = field(default_factory=dict) + headers: Dict[str, str] = field(default_factory=dict) + body: Optional[Any] = None + + # Authentication context + auth_config: Dict[str, Any] = field(default_factory=dict) + auth_headers: Dict[str, str] = field(default_factory=dict) + + # Backend information + backend_url: str = '' + backend_path: str = '' + backend_headers: Dict[str, str] = field(default_factory=dict) + + # Response information + response_status: Optional[int] = None + response_headers: Dict[str, str] = field(default_factory=dict) + response_body: Optional[Any] = None + is_binary_response: bool = False # Flag to indicate binary content + raw_response_content: Optional[bytes] = None # Raw bytes for binary responses + + # Execution trace + execution_trace: List[str] = field(default_factory=list) + start_time: datetime = field(default_factory=datetime.now) + + def add_trace(self, stage_name: str, message: str = ''): + """Add execution trace entry.""" + timestamp = datetime.now().isoformat() + trace_entry = f'[{timestamp}] {stage_name}' + if message: + trace_entry += f': {message}' + self.execution_trace.append(trace_entry) + + def merge_headers(self, new_headers: Dict[str, str]): + """Merge new headers with existing headers.""" + self.headers.update(new_headers) + + def merge_backend_headers(self, new_headers: Dict[str, str]): + """Merge new headers with 
backend headers.""" + self.backend_headers.update(new_headers) + + +class PipelineException(Exception): + """Exception raised during pipeline execution.""" + + def __init__( + self, + message: str, + stage_name: str = '', + context: Optional[PipelineContext] = None, + ): + self.message = message + self.stage_name = stage_name + self.context = context + super().__init__(message) + + +class PipelineStage(ABC): + """ + Abstract base class for all pipeline components. + + All pipeline stages (atomic and composite) must implement this protocol. + This ensures uniform behavior across the entire pipeline architecture. + """ + + @abstractmethod + async def execute(self, context: PipelineContext) -> PipelineContext: + """ + Execute the pipeline stage. + + Args: + context: Pipeline context that flows through stages + + Returns: + Modified context (same object, modified in-place) + + Raises: + PipelineException: If any stage fails + """ + pass + + @abstractmethod + def get_stage_type(self) -> StageType: + """Get the type of this pipeline stage.""" + pass + + @abstractmethod + def get_name(self) -> str: + """Get the name/identifier of this pipeline stage.""" + pass + + +class CompositePipelineStage(PipelineStage): + """ + Composite pipeline stage that executes multiple stages in sequence. + + Implements the Composite pattern for building complex pipelines + from simpler components. + """ + + def __init__(self, name: str, stages: List[PipelineStage]): + self.name = name + self.stages = stages + + async def execute(self, context: PipelineContext) -> PipelineContext: + """Execute all stages in sequence.""" + context.add_trace( + self.get_name(), + f'Starting composite pipeline with {len(self.stages)} stages', + ) + + try: + for stage in self.stages: + context = await stage.execute(context) + + context.add_trace( + self.get_name(), 'Composite pipeline completed successfully' + ) + return context + + except Exception as e: + context.add_trace(self.get_name(), f'Composite pipeline failed: {str(e)}') + if isinstance(e, PipelineException): + raise + else: + raise PipelineException( + f"Composite pipeline '{self.name}' failed: {str(e)}", + self.get_name(), + context, + ) + + def get_stage_type(self) -> StageType: + """Return composite stage type.""" + return StageType.COMPOSITE + + def get_name(self) -> str: + """Return the composite pipeline name.""" + return self.name + + def add_stage(self, stage: PipelineStage): + """Add a stage to the pipeline.""" + self.stages.append(stage) + + def remove_stage(self, stage_name: str) -> bool: + """Remove a stage by name. 
Returns True if removed, False if not found.""" + for i, stage in enumerate(self.stages): + if stage.get_name() == stage_name: + self.stages.pop(i) + return True + return False diff --git a/wavefront/server/modules/api_services_module/api_services_module/models/service.py b/wavefront/server/modules/api_services_module/api_services_module/models/service.py new file mode 100644 index 00000000..a0db442b --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/models/service.py @@ -0,0 +1,122 @@ +"""Service definition models.""" + +from dataclasses import dataclass, field +from typing import Dict, List, Optional, Any +from enum import Enum + + +class AuthType(Enum): + """Supported authentication types.""" + + BEARER = 'bearer' + BASIC = 'basic' + API_KEY = 'api_key' + + +class HttpMethod(Enum): + """Supported HTTP methods.""" + + GET = 'GET' + POST = 'POST' + PUT = 'PUT' + DELETE = 'DELETE' + PATCH = 'PATCH' + + +@dataclass +class AuthConfig: + """Authentication configuration.""" + + id: str + type: AuthType + version: str = 'v1' + base_url: Optional[str] = None + path: str = '' + additional_headers: Dict[str, str] = field(default_factory=dict) + + # Auth type-specific configurations + token: Optional[str] = None # For bearer auth + username: Optional[str] = None # For basic auth + password: Optional[str] = None # For basic auth + api_key: Optional[str] = None # For API key auth + api_key_header: str = 'X-API-Key' # Header name for API key + + +@dataclass +class ApiConfig: + """API endpoint configuration.""" + + id: str + # Exposed proxy path (e.g., /get-objects or /get-objects/{id}) + path: str + # Backend path template (e.g., /objects or /objects/{id}) + backend_path: str + method: HttpMethod + version: str = 'v1' + additional_headers: Dict[str, str] = field(default_factory=dict) + # Backend query parameters to be sent with the request + backend_query_params: Dict[str, Any] = field(default_factory=dict) + + # Output mapping configuration (simplified for Phase 1) + output_mapper_enabled: bool = False + output_mapper: Dict[str, str] = field(default_factory=dict) + + +@dataclass +class ServiceDefinition: + """Complete service definition.""" + + id: str + base_url: str + auth: AuthConfig + apis: List[ApiConfig] = field(default_factory=list) + + def get_api_by_id(self, api_id: str, version: str = 'v1') -> Optional[ApiConfig]: + """Get API configuration by ID and version.""" + for api in self.apis: + if api.id == api_id and api.version == version: + return api + return None + + def get_api_ids(self) -> List[str]: + """Get list of all API IDs.""" + return [api.id for api in self.apis] + + +@dataclass +class ProxyResponse: + """Standardized proxy response.""" + + meta: Dict[str, Any] + data: Any + http_status_code: int = 200 + + @classmethod + def success( + cls, + data: Any, + trace: List[str], + message: str = 'Success', + http_status_code: int = 200, + ) -> 'ProxyResponse': + """Create a successful response.""" + return cls( + meta={'status': 'success', 'message': message, 'trace': trace}, + data=data, + http_status_code=http_status_code, + ) + + @classmethod + def error( + cls, + message: str, + trace: List[str], + status: str = 'error', + http_status_code: int = 500, + ) -> 'ProxyResponse': + """Create an error response.""" + return cls( + meta={'status': status, 'message': message, 'trace': trace}, + data=None, + http_status_code=http_status_code, + ) diff --git a/wavefront/server/modules/api_services_module/api_services_module/pipeline/__init__.py 
b/wavefront/server/modules/api_services_module/api_services_module/pipeline/__init__.py new file mode 100644 index 00000000..d39cdc63 --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/pipeline/__init__.py @@ -0,0 +1 @@ +"""Pipeline package for API services module.""" diff --git a/wavefront/server/modules/api_services_module/api_services_module/pipeline/builder.py b/wavefront/server/modules/api_services_module/api_services_module/pipeline/builder.py new file mode 100644 index 00000000..334ffe1d --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/pipeline/builder.py @@ -0,0 +1,154 @@ +"""Pipeline builder for creating service pipelines.""" + +from typing import List +from ..models.pipeline import CompositePipelineStage, PipelineStage +from ..models.service import ServiceDefinition, ApiConfig +from ..auth.handlers import AuthHandlerFactory +from .stages import ( + RequestHeadersForwarderStage, + HeaderInjectorStage, + ApiProcessorStage, + RequestSenderStage, + ResponseMapperStage, +) + + +class PipelineBuilder: + """Builder for creating service pipelines using the composite pattern.""" + + @staticmethod + def build_auth_pipeline( + service_definition: ServiceDefinition, + ) -> CompositePipelineStage: + """ + Build authentication pipeline for a service. + + Pipeline: [Authenticator โ†’ Header Injector] + """ + stages: List[PipelineStage] = [] + + # 1. Authenticator stage + auth_handler = AuthHandlerFactory.create_handler(service_definition.auth) + stages.append(auth_handler) + + # 2. Auth header injector (for additional auth headers) + if service_definition.auth.additional_headers: + auth_header_injector = HeaderInjectorStage( + service_definition.auth.additional_headers, 'auth_header_injector' + ) + stages.append(auth_header_injector) + + return CompositePipelineStage( + name=f'auth_pipeline_{service_definition.id}', stages=stages + ) + + @staticmethod + def build_api_pipeline( + service_definition: ServiceDefinition, api_config: ApiConfig + ) -> CompositePipelineStage: + """ + Build API processing pipeline for a specific API. + + Pipeline: [API Processor โ†’ Header Injector โ†’ Request Sender โ†’ Response Mapper] + Note: Skipping preprocessor and postprocessor as requested + """ + stages: List[PipelineStage] = [] + + # 1. API processor stage + api_processor = ApiProcessorStage(api_config, service_definition) + stages.append(api_processor) + + # 2. API header injector (for additional API headers) + if api_config.additional_headers: + api_header_injector = HeaderInjectorStage( + api_config.additional_headers, f'api_header_injector_{api_config.id}' + ) + stages.append(api_header_injector) + + # 3. Request sender stage + request_sender = RequestSenderStage() + stages.append(request_sender) + + # 4. Response mapper stage + response_mapper = ResponseMapperStage(api_config) + stages.append(response_mapper) + + return CompositePipelineStage( + name=f'api_pipeline_{service_definition.id}_{api_config.id}', stages=stages + ) + + @staticmethod + def build_service_pipeline( + service_definition: ServiceDefinition, api_config: ApiConfig + ) -> CompositePipelineStage: + """ + Build complete service pipeline. + + Pipeline: [Request Headers Forwarder โ†’ Auth Pipeline โ†’ API Pipeline] + """ + stages: List[PipelineStage] = [] + + # 0. Request headers forwarder (forward incoming headers to backend) + headers_forwarder = RequestHeadersForwarderStage() + stages.append(headers_forwarder) + + # 1. 
Authentication pipeline + auth_pipeline = PipelineBuilder.build_auth_pipeline(service_definition) + stages.append(auth_pipeline) + + # 2. API processing pipeline + api_pipeline = PipelineBuilder.build_api_pipeline( + service_definition, api_config + ) + stages.append(api_pipeline) + + return CompositePipelineStage( + name=f'service_pipeline_{service_definition.id}_{api_config.id}', + stages=stages, + ) + + +class PipelineCache: + """Cache for compiled pipelines to improve performance.""" + + def __init__(self): + self._pipeline_cache = {} + + def get_pipeline( + self, service_id: str, api_id: str, api_version: str = 'v1' + ) -> CompositePipelineStage: + """Get cached pipeline or None if not found.""" + cache_key = f'{service_id}:{api_id}:{api_version}' + return self._pipeline_cache.get(cache_key) + + def cache_pipeline( + self, + service_id: str, + api_id: str, + pipeline: CompositePipelineStage, + api_version: str = 'v1', + ): + """Cache a compiled pipeline.""" + cache_key = f'{service_id}:{api_id}:{api_version}' + self._pipeline_cache[cache_key] = pipeline + + def invalidate_service(self, service_id: str): + """Invalidate all pipelines for a service.""" + keys_to_remove = [ + key + for key in self._pipeline_cache.keys() + if key.startswith(f'{service_id}:') + ] + for key in keys_to_remove: + del self._pipeline_cache[key] + + def clear_all(self): + """Clear all cached pipelines.""" + self._pipeline_cache.clear() + + def get_stats(self) -> dict: + """Get cache statistics.""" + return { + 'cached_pipelines': len(self._pipeline_cache), + 'cache_keys': list(self._pipeline_cache.keys()), + } diff --git a/wavefront/server/modules/api_services_module/api_services_module/pipeline/stages.py b/wavefront/server/modules/api_services_module/api_services_module/pipeline/stages.py new file mode 100644 index 00000000..5a7aa9d6 --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/pipeline/stages.py @@ -0,0 +1,447 @@ +"""Concrete pipeline stages for API processing.""" + +import asyncio +import httpx +from typing import Dict, Any +from urllib.parse import urljoin + +from ..models.pipeline import ( + PipelineStage, + PipelineContext, + StageType, + PipelineException, +) +from ..models.service import ApiConfig, ServiceDefinition + + +class RequestHeadersForwarderStage(PipelineStage): + """Pipeline stage for forwarding incoming request headers to backend.""" + + # Headers that should NOT be forwarded to the backend + # Includes hop-by-hop headers and authentication headers + EXCLUDED_HEADERS = { + # Hop-by-hop headers (should not be forwarded) + 'host', + 'content-length', + 'transfer-encoding', + 'connection', + 'keep-alive', + 'proxy-authenticate', + 'proxy-authorization', + 'te', + 'trailers', + 'upgrade', + # Authentication headers (will be set by auth pipeline) + 'authorization', + 'x-api-key', + 'api-key', + 'x-auth-token', + 'cookie', + 'set-cookie', + 'x-client-key', + } + + def __init__(self): + pass + + async def execute(self, context: PipelineContext) -> PipelineContext: + """Forward incoming request headers to backend headers.""" + context.add_trace( + self.get_name(), f'Forwarding {len(context.headers)} incoming headers' + ) + + try: + # Filter and forward headers + forwarded_count = 0 + for header_name, header_value in context.headers.items(): + # Skip excluded headers + if header_name.lower() in self.EXCLUDED_HEADERS: + continue + + # Forward the header to backend + context.backend_headers[header_name] = header_value + forwarded_count += 1 + + context.add_trace( + 
self.get_name(), f'Forwarded {forwarded_count} headers to backend' + ) + return context + + except Exception as e: + context.add_trace(self.get_name(), f'Header forwarding failed: {str(e)}') + raise PipelineException( + f'Header forwarding failed: {str(e)}', self.get_name(), context + ) + + def get_stage_type(self) -> StageType: + """Return header forwarder stage type.""" + return StageType.HEADER_INJECTOR + + def get_name(self) -> str: + """Return stage name.""" + return 'request_headers_forwarder' + + +class HeaderInjectorStage(PipelineStage): + """Pipeline stage for injecting additional headers.""" + + def __init__(self, headers: Dict[str, str], stage_name: str = 'header_injector'): + self.headers = headers + self.stage_name = stage_name + + async def execute(self, context: PipelineContext) -> PipelineContext: + """Inject additional headers into the context.""" + context.add_trace(self.get_name(), f'Injecting {len(self.headers)} headers') + + try: + context.merge_backend_headers(self.headers) + context.add_trace(self.get_name(), 'Headers injected successfully') + return context + + except Exception as e: + context.add_trace(self.get_name(), f'Header injection failed: {str(e)}') + raise PipelineException( + f'Header injection failed: {str(e)}', self.get_name(), context + ) + + def get_stage_type(self) -> StageType: + """Return header injector stage type.""" + return StageType.HEADER_INJECTOR + + def get_name(self) -> str: + """Return stage name.""" + return self.stage_name + + +class ApiProcessorStage(PipelineStage): + """Pipeline stage for processing API configuration.""" + + def __init__(self, api_config: ApiConfig, service_definition: ServiceDefinition): + self.api_config = api_config + self.service_definition = service_definition + + async def execute(self, context: PipelineContext) -> PipelineContext: + """Process API configuration and prepare backend request.""" + context.add_trace(self.get_name(), f'Processing API: {self.api_config.id}') + + try: + # Set backend URL and path + context.backend_url = self.service_definition.base_url + context.backend_path = self._substitute_path_parameters( + self.api_config.backend_path, context.path_params + ) + + # Add API-specific headers + context.merge_backend_headers(self.api_config.additional_headers) + + # Merge backend query params (backend config params first, then incoming params can override) + if self.api_config.backend_query_params: + merged_params = dict(self.api_config.backend_query_params) + merged_params.update(context.query_params) + context.query_params = merged_params + context.add_trace( + self.get_name(), + f'Merged {len(self.api_config.backend_query_params)} backend query params', + ) + + # Store API config in context for later stages + context.auth_config.update( + { + 'api_id': self.api_config.id, + 'api_version': self.api_config.version, + 'api_method': self.api_config.method.value, + } + ) + + context.add_trace( + self.get_name(), f'API processing completed for {self.api_config.id}' + ) + return context + + except Exception as e: + context.add_trace(self.get_name(), f'API processing failed: {str(e)}') + raise PipelineException( + f'API processing failed: {str(e)}', self.get_name(), context + ) + + def get_stage_type(self) -> StageType: + """Return API processor stage type.""" + return StageType.API_PROCESSOR + + def get_name(self) -> str: + """Return stage name.""" + return f'api_processor_{self.api_config.id}' + + def _substitute_path_parameters( + self, path_template: str, path_params: Dict[str, str] + ) -> str: + """ 
+ Substitute path parameters in the path template. + + Args: + path_template: Path template with parameters like /users/{id}/orders + path_params: Dictionary of parameter values + + Returns: + Path with parameters substituted + """ + result_path = path_template + for param_name, param_value in path_params.items(): + result_path = result_path.replace(f'{{{param_name}}}', param_value) + return result_path + + +class RequestSenderStage(PipelineStage): + """Pipeline stage for sending requests to backend services.""" + + def __init__(self, timeout: int = 30, max_retries: int = 3): + self.timeout = timeout + self.max_retries = max_retries + + async def execute(self, context: PipelineContext) -> PipelineContext: + """Send request to backend service.""" + context.add_trace(self.get_name(), f'Sending request to {context.backend_url}') + + try: + # Construct full URL + full_url = urljoin( + context.backend_url.rstrip('/') + '/', context.backend_path.lstrip('/') + ) + + # Get method from API config or default to POST + method = context.auth_config.get('api_method', 'POST') + + # Prepare request parameters + request_params = { + 'method': method, + 'url': full_url, + 'headers': context.backend_headers, + 'params': context.query_params, + 'timeout': self.timeout, + } + + # Add body for methods that support it + if method in ['POST', 'PUT', 'PATCH'] and context.body is not None: + if isinstance(context.body, dict): + request_params['json'] = context.body + else: + request_params['content'] = context.body + + # Send request with retry logic + response = await self._send_with_retry(request_params) + + # Store response in context + context.response_status = response.status_code + context.response_headers = dict(response.headers) + + # Check if response is binary content + content_type = response.headers.get('content-type', '').lower() + is_binary = self._is_binary_content_type(content_type) + + if is_binary: + # Store raw bytes for binary content + context.is_binary_response = True + context.raw_response_content = response.content + context.response_body = None # Don't parse binary as JSON/text + context.add_trace( + self.get_name(), + f'Received binary response ({content_type}), size: {len(response.content)} bytes', + ) + else: + # Parse response body for text/json content + try: + context.response_body = response.json() + except Exception: + context.response_body = response.text + + context.add_trace( + self.get_name(), f'Request completed with status {response.status_code}' + ) + return context + + except Exception as e: + context.add_trace(self.get_name(), f'Request failed: {str(e)}') + raise PipelineException( + f'Backend request failed: {str(e)}', self.get_name(), context + ) + + async def _send_with_retry(self, request_params: Dict[str, Any]) -> httpx.Response: + """Send request with retry logic using async httpx client.""" + last_exception = None + + for attempt in range(self.max_retries): + try: + async with httpx.AsyncClient() as client: + response = await client.request(**request_params) + + # Check status code - 4xx are valid responses, don't retry + # 5xx server errors should be retried + if 400 <= response.status_code < 500: + # Client error (4xx) - return as valid response, don't retry + return response + elif 500 <= response.status_code < 600: + # Server error (5xx) - raise to trigger retry + response.raise_for_status() + else: + # Success (2xx, 3xx) - return response + return response + + except httpx.HTTPStatusError as e: + # Handle HTTPStatusError (from raise_for_status) + if 400 <= 
e.response.status_code < 500: + # Client error (4xx) - return as valid response, don't retry + return e.response + # Server error (5xx) - will retry + last_exception = e + + except (httpx.RequestError, httpx.TimeoutException) as e: + # Connection errors, timeouts, etc. - will retry + last_exception = e + + # Wait before retry (exponential backoff using asyncio.sleep) + if attempt < self.max_retries - 1 and last_exception is not None: + await asyncio.sleep(2**attempt) + + # All retries failed + if last_exception is not None: + raise last_exception + raise Exception('Request failed after all retries') + + def _is_binary_content_type(self, content_type: str) -> bool: + """ + Check if content type indicates binary content. + + Args: + content_type: Content-Type header value (already lowercased) + + Returns: + True if binary content, False otherwise + """ + # List of binary content type patterns + binary_patterns = [ + 'audio/', + 'video/', + 'image/', + 'application/octet-stream', + 'application/pdf', + 'application/zip', + 'application/x-tar', + 'application/x-gzip', + 'multipart/', + ] + + # Check if any binary pattern matches + for pattern in binary_patterns: + if pattern in content_type: + return True + + # Default to text/json for everything else + return False + + def get_stage_type(self) -> StageType: + """Return request sender stage type.""" + return StageType.REQUEST_SENDER + + def get_name(self) -> str: + """Return stage name.""" + return 'request_sender' + + +class ResponseMapperStage(PipelineStage): + """Pipeline stage for mapping response fields.""" + + def __init__(self, api_config: ApiConfig): + self.api_config = api_config + + async def execute(self, context: PipelineContext) -> PipelineContext: + """Map response fields if output mapper is enabled.""" + context.add_trace(self.get_name(), 'Processing response mapping') + + try: + if ( + not self.api_config.output_mapper_enabled + or not self.api_config.output_mapper + ): + context.add_trace( + self.get_name(), 'No output mapping configured, skipping' + ) + return context + + if not isinstance(context.response_body, dict): + context.add_trace( + self.get_name(), 'Response body is not a dict, skipping mapping' + ) + return context + + # Apply field mapping + mapped_response = self._apply_field_mapping( + context.response_body, self.api_config.output_mapper + ) + + context.response_body = mapped_response + context.add_trace( + self.get_name(), + f'Applied {len(self.api_config.output_mapper)} field mappings', + ) + return context + + except Exception as e: + context.add_trace(self.get_name(), f'Response mapping failed: {str(e)}') + raise PipelineException( + f'Response mapping failed: {str(e)}', self.get_name(), context + ) + + def _apply_field_mapping( + self, data: Dict[str, Any], mapping: Dict[str, str] + ) -> Dict[str, Any]: + """Apply field mapping to response data.""" + mapped_data = {} + + for source_path, target_path in mapping.items(): + try: + # Get value from source path (supports dot notation) + value = self._get_nested_value(data, source_path) + + # Set value at target path (supports dot notation) + self._set_nested_value(mapped_data, target_path, value) + + except KeyError: + # Source field doesn't exist, skip this mapping + continue + + return mapped_data + + def _get_nested_value(self, data: Dict[str, Any], path: str) -> Any: + """Get value from nested dictionary using dot notation.""" + keys = path.split('.') + current = data + + for key in keys: + if isinstance(current, dict) and key in current: + current = 
current[key] + else: + raise KeyError(f"Path '{path}' not found in data") + + return current + + def _set_nested_value(self, data: Dict[str, Any], path: str, value: Any): + """Set value in nested dictionary using dot notation.""" + keys = path.split('.') + current = data + + # Navigate to the parent of the target key + for key in keys[:-1]: + if key not in current: + current[key] = {} + current = current[key] + + # Set the final value + current[keys[-1]] = value + + def get_stage_type(self) -> StageType: + """Return response mapper stage type.""" + return StageType.RESPONSE_MAPPER + + def get_name(self) -> str: + """Return stage name.""" + return f'response_mapper_{self.api_config.id}' diff --git a/wavefront/server/modules/api_services_module/api_services_module/utils/api_change_processor.py b/wavefront/server/modules/api_services_module/api_services_module/utils/api_change_processor.py new file mode 100644 index 00000000..f3173b88 --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/utils/api_change_processor.py @@ -0,0 +1,38 @@ +from common_module.log.logger import logger +from api_services_module.core.router import ProxyRouter +import json +from api_services_module.utils.api_change_publisher import UpdateMessage + + +class ApiChangeProcessor: + def __init__(self, proxy_router: ProxyRouter): + self.proxy_router = proxy_router + + async def process_message(self, message: str): + logger.debug(f'Processing message: {message}') + try: + update_message = UpdateMessage(**json.loads(message)) + + if update_message.operation in ['update', 'create']: + logger.info(f'Reloading service: {update_message.service_id}') + await self.proxy_router.proxy.reload_service( + service_id=update_message.service_id + ) + self.proxy_router.reload_service_routes( + service_id=update_message.service_id + ) + + elif update_message.operation == 'delete': + logger.info(f'Removing service: {update_message.service_id}') + self.proxy_router.proxy.remove_service( + service_id=update_message.service_id + ) + self.proxy_router.remove_service_routes( + service_id=update_message.service_id + ) + + else: + logger.error(f'Invalid operation: {update_message.operation}') + + except Exception as e: + logger.error(f'Error processing message: {e}') diff --git a/wavefront/server/modules/api_services_module/api_services_module/utils/api_change_publisher.py b/wavefront/server/modules/api_services_module/api_services_module/utils/api_change_publisher.py new file mode 100644 index 00000000..73219e0c --- /dev/null +++ b/wavefront/server/modules/api_services_module/api_services_module/utils/api_change_publisher.py @@ -0,0 +1,24 @@ +from db_repo_module.cache.cache_manager import CacheManager +import json +from dataclasses import dataclass, asdict +from typing import Optional + +REDIS_API_SERVICE_UPDATES_CHANNEL = 'floware/api_service/updates' + + +@dataclass +class UpdateMessage: + service_id: str + operation: str + metadata: Optional[dict] = None + + +class ApiChangePublisher: + def __init__(self, cache_manager: CacheManager): + self.cache_manager = cache_manager + + def publish_message(self, message: UpdateMessage): + self.cache_manager.publish( + channel=REDIS_API_SERVICE_UPDATES_CHANNEL, + message=json.dumps(asdict(message)), + ) diff --git a/wavefront/server/modules/api_services_module/pyproject.toml b/wavefront/server/modules/api_services_module/pyproject.toml new file mode 100644 index 00000000..25313ef2 --- /dev/null +++ b/wavefront/server/modules/api_services_module/pyproject.toml @@ -0,0 +1,36 @@ 
+[project] +name = "api-services-module" +version = "0.1.0" +description = "" +authors = [ + { name = "rootflo engineering", email = "engineering@rootflo.ai" } +] + +requires-python = ">=3.11" +dependencies = [ + "fastapi>=0.104.0", + "uvicorn[standard]>=0.24.0", + "httpx>=0.25.0", + "pyyaml>=6.0.1", + "pydantic>=2.0.0", + "dependency-injector>=4.41.0", + "common-module", + "db-repo-module", +] + +[tool.pytest.ini_options] +asyncio_mode = "auto" + +[tool.uv] +package = true + +[tool.uv.sources] +common-module = { workspace = true } +db-repo-module = { workspace = true } + +[build-system] +requires = ["poetry-core>=2.0.0,<3.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.hatch.build.targets.wheel] +packages = ["api_services_module"] diff --git a/wavefront/server/modules/api_services_module/tests/__init__.py b/wavefront/server/modules/api_services_module/tests/__init__.py new file mode 100644 index 00000000..7a636a85 --- /dev/null +++ b/wavefront/server/modules/api_services_module/tests/__init__.py @@ -0,0 +1 @@ +"""Tests package for API Services Module.""" diff --git a/wavefront/server/modules/api_services_module/tests/conftest.py b/wavefront/server/modules/api_services_module/tests/conftest.py new file mode 100644 index 00000000..a94d3f54 --- /dev/null +++ b/wavefront/server/modules/api_services_module/tests/conftest.py @@ -0,0 +1,724 @@ +""" +Pytest configuration and fixtures for API Services Module. + +This file provides reusable fixtures for testing the API services module +components including containers, services, authentication, and pipelines. +""" + +import pytest +from unittest.mock import Mock +from io import BytesIO +from typing import Dict, Any +import yaml +from dependency_injector import providers + +from api_services_module.api_services_container import create_api_services_container +from api_services_module.config.registry import ServiceRegistry +from api_services_module.config.parser import ServiceDefinitionParser +from api_services_module.auth.manager import AuthManager +from api_services_module.core.proxy import ApiProxy +from api_services_module.core.router import ProxyRouter +from api_services_module.models.service import ( + ServiceDefinition, + AuthConfig, + ApiConfig, + AuthType, + HttpMethod, +) +from api_services_module.models.pipeline import PipelineContext +from api_services_module.pipeline.builder import PipelineBuilder, PipelineCache +from common_module.models.response import Meta +from common_module.models.response import ResponseModel + + +# ============================================================================ +# Mock Dependencies +# ============================================================================ + + +class MockDatabaseClient: + """Mock database client for testing.""" + + def __init__(self): + self.connected = False + self.migration_run = False + + async def connect(self): + """Mock connect method.""" + self.connected = True + + def run_migration(self): + """Mock migration method.""" + self.migration_run = True + + def is_connected(self) -> bool: + """Check if connected.""" + return self.connected + + +class MockCacheManager: + """Mock cache manager for testing.""" + + def __init__(self): + self._cache: Dict[str, Any] = {} + + def get(self, key: str): + """Mock get method.""" + return self._cache.get(key) + + def set(self, key: str, value: Any, ttl: int = None): + """Mock set method.""" + self._cache[key] = value + + def add(self, key: str, value: Any, ttl: int = None): + """Alias for cache add to match production interface.""" + 
self.set(key, value, ttl) + + def delete(self, key: str): + """Mock delete method.""" + self._cache.pop(key, None) + + def clear(self): + """Clear all cached items.""" + self._cache.clear() + + def get_str(self, key: str): + """Return cached string value.""" + value = self.get(key) + if value is None: + return None + return value + + +class MockCloudStorageManager: + """Mock cloud storage manager for testing.""" + + def __init__(self): + self._storage: Dict[str, bytes] = {} + self.last_saved: Dict[str, Dict[str, Any]] = {} + + def save_small_file( + self, file_content: bytes, bucket_name: str, key: str, content_type: str + ): + """Mock save small file.""" + self._storage[key] = file_content + self.last_saved[key] = { + 'bucket': bucket_name, + 'content_type': content_type, + } + + def read_file(self, bucket_name: str, file_path: str) -> BytesIO: + """Mock read file.""" + data = self._storage.get(file_path, b'') + return BytesIO(data) + + def delete_file(self, bucket_name: str, file_path: str): + """Mock delete file.""" + self._storage.pop(file_path, None) + + +class MockApiServiceRecord: + """Lightweight API service record.""" + + def __init__(self, service_id: str): + self.id = service_id + self.service_def_path = f'api_services/{service_id}.yaml' + self.is_active = True + + +class MockApiServicesRepository: + """In-memory repository for API services metadata.""" + + def __init__(self): + self._data: Dict[str, MockApiServiceRecord] = {} + + def create(self, **kwargs): + record = MockApiServiceRecord(kwargs['id']) + self._data[record.id] = record + return record + + def find_one(self, id: str): + return self._data.get(id) + + def find(self): + return list(self._data.values()) + + def find_one_and_update(self, filters: Dict[str, Any], update_data: Dict[str, Any]): + record = self._data.get(filters.get('id')) + if not record: + return None + for key, value in update_data.items(): + setattr(record, key, value) + return record + + def delete_all(self, filters: Dict[str, Any]): + self._data.pop(filters.get('id'), None) + + +class MockApiServicesManager: + """Mock ApiServicesManager that sources YAML from in-memory map.""" + + def __init__(self, service_yaml_map: Dict[str, str]): + self._yaml_map = service_yaml_map + self._repository = MockApiServicesRepository() + for service_id in self._yaml_map.keys(): + self._repository.create(id=service_id, service_def_path='') + + async def get_all_api_services(self): + return self._repository.find() + + def fetch_service_def(self, service_record): + return self._yaml_map[service_record.id] + + async def get_api_service(self, id: str): + return self._repository.find_one(id=id) + + async def create_api_service(self, id: str, service_def_yaml: str): + self._yaml_map[id] = service_def_yaml + return self._repository.create(id=id, service_def_path='') + + def update_api_service(self, id: str, service_def_yaml: str): + self._yaml_map[id] = service_def_yaml + return self._repository.find_one(id=id) + + async def delete_api_service(self, id: str): + self._yaml_map.pop(id, None) + self._repository.delete_all({'id': id}) + + +@pytest.fixture +def mock_db_client(): + """Provide a mock database client.""" + return MockDatabaseClient() + + +@pytest.fixture +def mock_cache_manager(): + """Provide a mock cache manager.""" + return MockCacheManager() + + +# ============================================================================ +# Service Definition Fixtures +# ============================================================================ + + +@pytest.fixture +def 
sample_bearer_auth_config(): + """Sample Bearer authentication configuration.""" + return AuthConfig( + id='test-bearer-auth', + type=AuthType.BEARER, + version='v1', + token='test-bearer-token-123', + additional_headers={'X-Client-ID': 'test-client'}, + ) + + +@pytest.fixture +def sample_basic_auth_config(): + """Sample Basic authentication configuration.""" + return AuthConfig( + id='test-basic-auth', + type=AuthType.BASIC, + version='v1', + username='test_user', + password='test_password', + additional_headers={'X-Auth-Type': 'basic'}, + ) + + +@pytest.fixture +def sample_api_key_auth_config(): + """Sample API Key authentication configuration.""" + return AuthConfig( + id='test-apikey-auth', + type=AuthType.API_KEY, + version='v1', + api_key='test-api-key-456', + api_key_header='X-API-Key', + additional_headers={'X-Service': 'test'}, + ) + + +@pytest.fixture +def sample_api_configs(): + """Sample API configurations.""" + return [ + ApiConfig( + id='get-users', + path='/users', + backend_path='/users', + method=HttpMethod.GET, + version='v1', + additional_headers={'X-Feature': 'user-list'}, + ), + ApiConfig( + id='create-user', + path='/users', + backend_path='/users', + method=HttpMethod.POST, + version='v1', + additional_headers={'X-Feature': 'user-create'}, + ), + ApiConfig( + id='get-user-orders', + path='/users/{id}/orders', + backend_path='/users/{id}/orders', + method=HttpMethod.GET, + version='v1', + output_mapper_enabled=True, + output_mapper={ + 'order_id': 'id', + 'order_date': 'created_at', + 'customer.name': 'customer_name', + }, + ), + ] + + +@pytest.fixture +def sample_service_definition(sample_bearer_auth_config, sample_api_configs): + """Sample service definition with Bearer auth.""" + return ServiceDefinition( + id='test-service', + base_url='https://api.test-service.com', + auth=sample_bearer_auth_config, + apis=sample_api_configs, + ) + + +@pytest.fixture +def sample_basic_service_definition(sample_basic_auth_config, sample_api_configs): + """Sample service definition with Basic auth.""" + return ServiceDefinition( + id='test-basic-service', + base_url='https://api.basic-service.com', + auth=sample_basic_auth_config, + apis=sample_api_configs, + ) + + +@pytest.fixture +def sample_apikey_service_definition(sample_api_key_auth_config, sample_api_configs): + """Sample service definition with API Key auth.""" + return ServiceDefinition( + id='test-apikey-service', + base_url='https://api.apikey-service.com', + auth=sample_api_key_auth_config, + apis=sample_api_configs, + ) + + +# ============================================================================ +# YAML Configuration Fixtures +# ============================================================================ + + +@pytest.fixture +def sample_yaml_config(): + """Sample YAML service configuration.""" + return { + 'service': { + 'id': 'yaml-test-service', + 'base_url': 'https://api.yaml-test.com', + 'auth': { + 'id': 'yaml-auth', + 'type': 'bearer', + 'version': 'v1', + 'token': 'yaml-test-token', + 'additional_headers': {'X-YAML-Test': 'true'}, + }, + 'apis': [ + { + 'id': 'yaml-api', + 'path': '/yaml/test', + 'backend_path': '/yaml/test', + 'method': 'GET', + 'version': 'v1', + 'additional_headers': {'X-API-Test': 'yaml'}, + } + ], + } + } + + +@pytest.fixture +def sample_service_yaml_map(sample_yaml_config): + """Sample service definitions stored as YAML strings.""" + another_config = { + 'service': { + 'id': 'another-service', + 'base_url': 'https://api.another.com', + 'auth': { + 'id': 'another-auth', + 'type': 
'basic', + 'username': 'user', + 'password': 'pass', + }, + 'apis': [ + { + 'id': 'another-api', + 'path': '/another', + 'backend_path': '/another', + 'method': 'POST', + } + ], + } + } + + crm_config = { + 'service': { + 'id': 'crm-service', + 'base_url': 'https://api.crm-system.com', + 'auth': { + 'id': 'crm-auth', + 'type': 'bearer', + 'token': 'crm-test-token', + }, + 'apis': [ + { + 'id': 'get-customers', + 'path': '/customers', + 'backend_path': '/customers', + 'method': 'GET', + } + ], + } + } + + return { + sample_yaml_config['service']['id']: yaml.dump(sample_yaml_config), + another_config['service']['id']: yaml.dump(another_config), + crm_config['service']['id']: yaml.dump(crm_config), + } + + +@pytest.fixture +def mock_api_services_manager(sample_service_yaml_map): + """Mock ApiServicesManager backed by sample YAML definitions.""" + return MockApiServicesManager(service_yaml_map=sample_service_yaml_map.copy()) + + +@pytest.fixture +def mock_cloud_storage_manager(): + """Mock cloud storage manager fixture.""" + return MockCloudStorageManager() + + +@pytest.fixture +def mock_api_service_repository(): + """Mock API services repository fixture.""" + return MockApiServicesRepository() + + +# ============================================================================ +# Component Fixtures +# ============================================================================ + + +@pytest.fixture +async def service_registry(mock_api_services_manager): + """Service registry with loaded configurations from mock manager.""" + registry = ServiceRegistry(mock_api_services_manager) + await registry.load_from_db() + return registry + + +@pytest.fixture +def service_parser(): + """Service definition parser.""" + return ServiceDefinitionParser() + + +@pytest.fixture +def auth_manager(): + """Authentication manager.""" + return AuthManager() + + +@pytest.fixture +def pipeline_builder(): + """Pipeline builder.""" + return PipelineBuilder() + + +@pytest.fixture +def pipeline_cache(): + """Pipeline cache.""" + return PipelineCache() + + +@pytest.fixture +async def api_proxy(service_registry, mock_api_services_manager): + """API proxy with loaded service registry.""" + return ApiProxy(service_registry, mock_api_services_manager) + + +@pytest.fixture +async def proxy_router(service_registry, mock_api_services_manager): + """Proxy router with loaded service registry.""" + return ProxyRouter(service_registry, mock_api_services_manager) + + +# ============================================================================ +# Container Fixtures +# ============================================================================ + + +class MockResponseFormatter: + def buildSuccessResponse(self, data: Any): + meta = Meta(status='success', code=1) + if hasattr(data, 'dict'): + data = data.dict() + return ResponseModel(meta=meta, data=data).model_dump() + + def buildErrorResponse(self, error: str): + meta = Meta(status='failure', code=-1, error=error) + return ResponseModel(meta=meta).model_dump() + + +@pytest.fixture +def mock_response_formatter(): + return MockResponseFormatter() + + +@pytest.fixture +def api_services_container( + mock_api_service_repository, + mock_cloud_storage_manager, + mock_db_client, + mock_cache_manager, + mock_api_services_manager, + mock_response_formatter, +): + """Configured API services container.""" + container = create_api_services_container( + api_service_repository=mock_api_service_repository, + cloud_storage_manager=mock_cloud_storage_manager, + db_client=mock_db_client, + 
cache_manager=mock_cache_manager, + response_formatter=mock_response_formatter, + ) + container.api_service_manager.override(providers.Object(mock_api_services_manager)) + container.config.from_dict({'api_service': {'application_bucket': 'test-bucket'}}) + return container + + +@pytest.fixture +async def initialized_container(api_services_container): + """API services container with initialized components.""" + # Initialize the service registry + service_registry = api_services_container.initialized_service_registry() + + # Load services from DB (async operation) + if service_registry.api_service_manager: + await service_registry.load_from_db() + + # Reload routes after services are loaded + proxy_router = api_services_container.proxy_router() + proxy_router.reload_routes() + + # Wire the container + api_services_container.wire(modules=[]) + + return api_services_container + + +# ============================================================================ +# Pipeline Context Fixtures +# ============================================================================ + + +@pytest.fixture +def sample_pipeline_context(): + """Sample pipeline context for testing.""" + return PipelineContext( + service_id='test-service', + api_id='get-users', + api_version='v1', + method='POST', + path='/test/path', + query_params={'limit': '10', 'offset': '0'}, + headers={'User-Agent': 'test-client', 'Content-Type': 'application/json'}, + body={'test': 'data'}, + ) + + +@pytest.fixture +def authenticated_pipeline_context(sample_pipeline_context): + """Pipeline context with authentication headers.""" + context = sample_pipeline_context + context.auth_headers = {'Authorization': 'Bearer test-token'} + context.backend_headers = {'Authorization': 'Bearer test-token'} + return context + + +# ============================================================================ +# HTTP Mock Fixtures +# ============================================================================ + + +@pytest.fixture +def mock_httpx_response(): + """Mock httpx response.""" + mock_response = Mock() + mock_response.status_code = 200 + mock_response.headers = {'Content-Type': 'application/json'} + mock_response.json.return_value = {'success': True, 'data': 'test'} + mock_response.text = '{"success": true, "data": "test"}' + mock_response.raise_for_status.return_value = None + return mock_response + + +@pytest.fixture +def mock_httpx_client(mock_httpx_response): + """Mock httpx client.""" + from unittest.mock import MagicMock + + mock_client = MagicMock() + mock_client.request.return_value = mock_httpx_response + mock_client.__enter__.return_value = mock_client + mock_client.__exit__.return_value = None + return mock_client + + +# ============================================================================ +# FastAPI Test Client Fixtures +# ============================================================================ + + +@pytest.fixture +async def test_client(initialized_container): + """FastAPI test client for the API services module.""" + from fastapi.testclient import TestClient + from fastapi import FastAPI + + app = FastAPI() + router = initialized_container.router() + app.include_router(router) # No prefix - routes already have /floware/v1 + + return TestClient(app) + + +# ============================================================================ +# Async Test Fixtures +# ============================================================================ + + +@pytest.fixture +async def async_api_proxy(service_registry, mock_api_services_manager): + 
"""Async API proxy fixture.""" + proxy = ApiProxy(service_registry, mock_api_services_manager) + return proxy + + +@pytest.fixture +async def async_mock_dependencies(): + """Async mock dependencies.""" + db_client = MockDatabaseClient() + cache_manager = MockCacheManager() + + await db_client.connect() + db_client.run_migration() + + return {'db_client': db_client, 'cache_manager': cache_manager} + + +# ============================================================================ +# Utility Fixtures +# ============================================================================ + + +@pytest.fixture +def sample_request_data(): + """Sample request data for testing.""" + return { + 'service_id': 'test-service', + 'api_id': 'get-users', + 'api_version': 'v1', + 'method': 'POST', + 'path': '/test', + 'query_params': {'page': '1'}, + 'headers': {'Authorization': 'Bearer test'}, + 'body': {'filter': 'active'}, + } + + +@pytest.fixture +def sample_backend_response(): + """Sample backend response data.""" + return { + 'users': [ + {'id': 1, 'name': 'John', 'email': 'john@test.com'}, + {'id': 2, 'name': 'Jane', 'email': 'jane@test.com'}, + ], + 'total': 2, + 'page': 1, + } + + +# ============================================================================ +# Cleanup Fixtures +# ============================================================================ + + +@pytest.fixture(autouse=True) +def cleanup_after_test(): + """Cleanup fixture that runs after each test.""" + yield + # Cleanup code here if needed + pass + + +# ============================================================================ +# Parametrized Fixtures +# ============================================================================ + + +@pytest.fixture(params=[AuthType.BEARER, AuthType.BASIC, AuthType.API_KEY]) +def auth_type(request): + """Parametrized auth type fixture.""" + return request.param + + +@pytest.fixture( + params=[HttpMethod.GET, HttpMethod.POST, HttpMethod.PUT, HttpMethod.DELETE] +) +def http_method(request): + """Parametrized HTTP method fixture.""" + return request.param + + +# ============================================================================ +# Configuration Override Fixtures +# ============================================================================ + + +@pytest.fixture +def override_config(): + """Configuration overrides for testing.""" + return {'timeout': 5, 'max_retries': 1, 'log_level': 'DEBUG'} + + +# ============================================================================ +# Error Simulation Fixtures +# ============================================================================ + + +@pytest.fixture +def mock_network_error(): + """Mock network error for testing error handling.""" + import httpx + + return httpx.RequestError('Network error') + + +@pytest.fixture +def mock_http_error(): + """Mock HTTP error for testing error handling.""" + import httpx + + mock_response = Mock() + mock_response.status_code = 500 + return httpx.HTTPStatusError('Server error', request=Mock(), response=mock_response) diff --git a/wavefront/server/modules/api_services_module/tests/test_example.py b/wavefront/server/modules/api_services_module/tests/test_example.py new file mode 100644 index 00000000..be55ec80 --- /dev/null +++ b/wavefront/server/modules/api_services_module/tests/test_example.py @@ -0,0 +1,341 @@ +""" +Example test file demonstrating how to use the conftest.py fixtures. + +This file shows various testing patterns for the API services module +using the fixtures defined in conftest.py. 
+""" + +import pytest +from unittest.mock import patch +import httpx + +from api_services_module.models.service import AuthType, HttpMethod + + +class TestServiceRegistry: + """Test cases for ServiceRegistry using fixtures.""" + + def test_service_registry_initialization(self, service_registry): + """Test that service registry initializes correctly.""" + assert service_registry is not None + services = service_registry.get_all_services() + assert len(services) >= 2 # We have at least 2 test services + + def test_service_registry_get_service(self, service_registry): + """Test getting a specific service.""" + service = service_registry.get_service('yaml-test-service') + assert service is not None + assert service.id == 'yaml-test-service' + assert service.base_url == 'https://api.yaml-test.com' + + def test_service_registry_validation(self, service_registry): + """Test service validation.""" + service_ids = service_registry.get_service_ids() + for service_id in service_ids: + is_valid = service_registry.validate_service(service_id) + assert is_valid, f'Service {service_id} should be valid' + + +class TestAuthenticationHandlers: + """Test cases for authentication handlers.""" + + def test_bearer_auth_config(self, sample_bearer_auth_config): + """Test Bearer authentication configuration.""" + assert sample_bearer_auth_config.type == AuthType.BEARER + assert sample_bearer_auth_config.token == 'test-bearer-token-123' + assert 'X-Client-ID' in sample_bearer_auth_config.additional_headers + + def test_basic_auth_config(self, sample_basic_auth_config): + """Test Basic authentication configuration.""" + assert sample_basic_auth_config.type == AuthType.BASIC + assert sample_basic_auth_config.username == 'test_user' + assert sample_basic_auth_config.password == 'test_password' + + def test_api_key_auth_config(self, sample_api_key_auth_config): + """Test API Key authentication configuration.""" + assert sample_api_key_auth_config.type == AuthType.API_KEY + assert sample_api_key_auth_config.api_key == 'test-api-key-456' + assert sample_api_key_auth_config.api_key_header == 'X-API-Key' + + @pytest.mark.parametrize( + 'auth_type', [AuthType.BEARER, AuthType.BASIC, AuthType.API_KEY] + ) + def test_auth_types(self, auth_type): + """Test different authentication types.""" + assert auth_type in [AuthType.BEARER, AuthType.BASIC, AuthType.API_KEY] + + +class TestServiceDefinitions: + """Test cases for service definitions.""" + + def test_sample_service_definition(self, sample_service_definition): + """Test sample service definition structure.""" + assert sample_service_definition.id == 'test-service' + assert sample_service_definition.base_url == 'https://api.test-service.com' + assert len(sample_service_definition.apis) == 3 + + def test_api_configs(self, sample_api_configs): + """Test API configurations.""" + assert len(sample_api_configs) == 3 + + get_users_api = sample_api_configs[0] + assert get_users_api.id == 'get-users' + assert get_users_api.method == HttpMethod.GET + assert get_users_api.path == '/users' + + def test_service_get_api_by_id(self, sample_service_definition): + """Test getting API by ID from service definition.""" + api = sample_service_definition.get_api_by_id('get-users') + assert api is not None + assert api.id == 'get-users' + + # Test non-existent API + non_existent = sample_service_definition.get_api_by_id('non-existent') + assert non_existent is None + + +class TestPipelineComponents: + """Test cases for pipeline components.""" + + def test_pipeline_context(self, 
sample_pipeline_context): + """Test pipeline context initialization.""" + assert sample_pipeline_context.service_id == 'test-service' + assert sample_pipeline_context.api_id == 'get-users' + assert sample_pipeline_context.method == 'POST' + assert 'limit' in sample_pipeline_context.query_params + + def test_pipeline_context_trace(self, sample_pipeline_context): + """Test pipeline context tracing.""" + initial_trace_count = len(sample_pipeline_context.execution_trace) + + sample_pipeline_context.add_trace('test_stage', 'test message') + + assert len(sample_pipeline_context.execution_trace) == initial_trace_count + 1 + assert 'test_stage' in sample_pipeline_context.execution_trace[-1] + + def test_pipeline_builder(self, pipeline_builder, sample_service_definition): + """Test pipeline builder.""" + assert pipeline_builder is not None + + # Test auth pipeline creation + auth_pipeline = pipeline_builder.build_auth_pipeline(sample_service_definition) + assert auth_pipeline is not None + assert 'auth_pipeline' in auth_pipeline.get_name() + + def test_pipeline_cache(self, pipeline_cache): + """Test pipeline cache functionality.""" + assert pipeline_cache is not None + + # Test cache stats + stats = pipeline_cache.get_stats() + assert 'cached_pipelines' in stats + assert 'cache_keys' in stats + + +class TestApiProxy: + """Test cases for API proxy.""" + + def test_api_proxy_initialization(self, api_proxy): + """Test API proxy initialization.""" + assert api_proxy is not None + + def test_api_proxy_health_check(self, api_proxy): + """Test API proxy health check.""" + health = api_proxy.health_check() + assert health['status'] == 'healthy' + assert 'services_count' in health + assert 'auth_types_supported' in health + + def test_api_proxy_service_info(self, api_proxy): + """Test getting service information.""" + try: + info = api_proxy.get_service_info('yaml-test-service') + assert info['service_id'] == 'yaml-test-service' + assert 'apis' in info + except Exception: + # Service might not be loaded in this context + pass + + @pytest.mark.asyncio + async def test_api_proxy_process_request_error(self, async_api_proxy): + """Test API proxy request processing with expected error.""" + # This should return an error response since we don't have real backends + response = await async_api_proxy.process_request( + service_id='yaml-test-service', + api_id='yaml-api', + api_version='v1', + method='POST', + path='/test', + query_params={'test': 'true'}, + headers={'User-Agent': 'test'}, + body={'test': 'data'}, + ) + + # Should return an error response, not raise an exception + assert response.meta['status'] in [ + 'pipeline_error', + 'error', + 'api_pipeline_error', + ] + assert 'trace' in response.meta + + +class TestDependencyInjection: + """Test cases for dependency injection container.""" + + def test_container_initialization(self, api_services_container): + """Test container initialization.""" + assert api_services_container is not None + + def test_container_service_registry(self, api_services_container): + """Test container service registry.""" + service_registry = api_services_container.service_registry() + assert service_registry is not None + + def test_container_auth_manager(self, api_services_container): + """Test container auth manager.""" + auth_manager = api_services_container.auth_manager() + assert auth_manager is not None + + def test_container_api_proxy(self, api_services_container): + """Test container API proxy.""" + api_proxy = api_services_container.api_proxy() + assert api_proxy is not 
None + + def test_container_router(self, api_services_container): + """Test container router.""" + router = api_services_container.router() + assert router is not None + assert len(router.routes) > 0 + + +class TestMockDependencies: + """Test cases for mock dependencies.""" + + @pytest.mark.asyncio + async def test_mock_db_client(self, mock_db_client): + """Test mock database client.""" + assert not mock_db_client.is_connected() + + await mock_db_client.connect() + assert mock_db_client.is_connected() + + mock_db_client.run_migration() + assert mock_db_client.migration_run + + def test_mock_cache_manager(self, mock_cache_manager): + """Test mock cache manager.""" + # Test cache operations + assert mock_cache_manager.get('test_key') is None + + mock_cache_manager.set('test_key', 'test_value') + assert mock_cache_manager.get('test_key') == 'test_value' + + mock_cache_manager.delete('test_key') + assert mock_cache_manager.get('test_key') is None + + +class TestHttpMocking: + """Test cases demonstrating HTTP mocking.""" + + def test_mock_httpx_response(self, mock_httpx_response): + """Test mock HTTP response.""" + assert mock_httpx_response.status_code == 200 + assert mock_httpx_response.json() == {'success': True, 'data': 'test'} + + def test_mock_httpx_client(self, mock_httpx_client, mock_httpx_response): + """Test mock HTTP client.""" + with mock_httpx_client as client: + response = client.request('GET', 'http://test.com') + assert response == mock_httpx_response + + @patch('httpx.Client') + def test_api_proxy_with_mocked_http( + self, mock_client_class, api_proxy, mock_httpx_response + ): + """Test API proxy with mocked HTTP client.""" + from unittest.mock import MagicMock + + # Configure the mock + mock_client = MagicMock() + mock_client.request.return_value = mock_httpx_response + mock_client.__enter__.return_value = mock_client + mock_client.__exit__.return_value = None + mock_client_class.return_value = mock_client + + # Verify the mock is configured correctly + assert mock_client_class.called is False + assert mock_client.request.return_value == mock_httpx_response + + +class TestErrorHandling: + """Test cases for error handling.""" + + def test_mock_network_error(self, mock_network_error): + """Test mock network error.""" + assert isinstance(mock_network_error, httpx.RequestError) + assert 'Network error' in str(mock_network_error) + + def test_mock_http_error(self, mock_http_error): + """Test mock HTTP error.""" + assert isinstance(mock_http_error, httpx.HTTPStatusError) + assert 'Server error' in str(mock_http_error) + + +class TestParametrizedFixtures: + """Test cases using parametrized fixtures.""" + + def test_different_auth_types(self, auth_type): + """Test with different authentication types.""" + assert auth_type in [AuthType.BEARER, AuthType.BASIC, AuthType.API_KEY] + assert isinstance(auth_type, AuthType) + + def test_different_http_methods(self, http_method): + """Test with different HTTP methods.""" + assert http_method in [ + HttpMethod.GET, + HttpMethod.POST, + HttpMethod.PUT, + HttpMethod.DELETE, + ] + assert isinstance(http_method, HttpMethod) + + +# ============================================================================ +# Integration Tests +# ============================================================================ + + +class TestIntegration: + """Integration test cases using multiple fixtures.""" + + def test_full_container_integration(self, initialized_container): + """Test full container integration.""" + # Test that all components work together + 
service_registry = initialized_container.service_registry() + auth_manager = initialized_container.auth_manager() + api_proxy = initialized_container.api_proxy() + router = initialized_container.router() + + assert service_registry is not None + assert auth_manager is not None + assert api_proxy is not None + assert router is not None + + @pytest.mark.asyncio + async def test_async_integration( + self, async_mock_dependencies, mock_api_services_manager + ): + """Test async integration with mock dependencies.""" + deps = async_mock_dependencies + + assert deps['db_client'].is_connected() + assert deps['cache_manager'] is not None + + # Test that we can create components with async dependencies + from api_services_module.config.registry import ServiceRegistry + + registry = ServiceRegistry(mock_api_services_manager) + await registry.load_from_db() + + assert len(registry.get_service_ids()) >= 2 diff --git a/wavefront/server/modules/api_services_module/tests/test_integration_with_mock_backend.py b/wavefront/server/modules/api_services_module/tests/test_integration_with_mock_backend.py new file mode 100644 index 00000000..bade18c6 --- /dev/null +++ b/wavefront/server/modules/api_services_module/tests/test_integration_with_mock_backend.py @@ -0,0 +1,765 @@ +""" +Integration tests with mock backend services for API Services Module. + +This test file demonstrates full end-to-end testing of the API proxy +with mock backend services that simulate real API responses. +""" + +import pytest +import asyncio +import json +from unittest.mock import patch, Mock, AsyncMock, MagicMock +from typing import Dict, Any + +from api_services_module.models.service import AuthType, HttpMethod + + +class MockBackendService: + """Mock backend service that simulates real API responses.""" + + def __init__(self, base_url: str = 'https://api.mock-service.com'): + self.base_url = base_url + self.request_log = [] + self.responses = {} + self.default_response = { + 'status': 'success', + 'data': {'message': 'Mock response'}, + 'timestamp': '2024-01-01T00:00:00Z', + } + + def set_response( + self, path: str, method: str, response: Dict[str, Any], status_code: int = 200 + ): + """Set a specific response for a path and method.""" + key = f'{method.upper()}:{path}' + self.responses[key] = { + 'response': response, + 'status_code': status_code, + 'headers': {'Content-Type': 'application/json'}, + } + + def get_response(self, path: str, method: str) -> Dict[str, Any]: + """Get response for a path and method.""" + key = f'{method.upper()}:{path}' + + # Try exact match first + if key in self.responses: + return self.responses[key] + + # Try to match without leading slash + path_no_slash = path.lstrip('/') + key_no_slash = f'{method.upper()}:/{path_no_slash}' + if key_no_slash in self.responses: + return self.responses[key_no_slash] + + # Try to match with leading slash + if not path.startswith('/'): + key_with_slash = f'{method.upper()}:/{path}' + if key_with_slash in self.responses: + return self.responses[key_with_slash] + + # Return default response + return { + 'response': self.default_response, + 'status_code': 200, + 'headers': {'Content-Type': 'application/json'}, + } + + def log_request( + self, method: str, url: str, headers: Dict[str, str], body: Any = None + ): + """Log incoming requests for verification.""" + self.request_log.append( + { + 'method': method, + 'url': url, + 'headers': dict(headers), + 'body': body, + 'timestamp': '2024-01-01T00:00:00Z', + } + ) + + def create_mock_response( + self, method: str, url: str, 
headers: Dict[str, str], **kwargs + ) -> Mock: + """Create a mock HTTP response.""" + # Extract path from URL + if url.startswith(self.base_url): + path = url[len(self.base_url) :] + else: + # Handle cases where URL might be just a path + from urllib.parse import urlparse + + parsed = urlparse(url) + path = parsed.path + + # Ensure path starts with / + if not path.startswith('/'): + path = '/' + path + + # Log the request + body = kwargs.get('json') or kwargs.get('content') + self.log_request(method, url, headers, body) + + # Get configured response + response_config = self.get_response(path, method) + + # Create mock response + from unittest.mock import Mock + import httpx + + mock_response = Mock() + mock_response.status_code = response_config['status_code'] + mock_response.headers = response_config['headers'] + mock_response.json.return_value = response_config['response'] + mock_response.text = json.dumps(response_config['response']) + + # Configure raise_for_status to behave like real httpx + def raise_for_status(): + if 400 <= mock_response.status_code < 600: + raise httpx.HTTPStatusError( + f'{mock_response.status_code} Error', + request=Mock(), + response=mock_response, + ) + + mock_response.raise_for_status.side_effect = raise_for_status + + return mock_response + + +@pytest.fixture +def mock_backend(): + """Create a mock backend service.""" + return MockBackendService() + + +@pytest.fixture +def configured_mock_backend(mock_backend): + """Mock backend with pre-configured responses.""" + # User management endpoints + mock_backend.set_response( + '/users', + 'GET', + { + 'users': [ + { + 'id': 1, + 'name': 'John Doe', + 'email': 'john@example.com', + 'active': True, + }, + { + 'id': 2, + 'name': 'Jane Smith', + 'email': 'jane@example.com', + 'active': True, + }, + ], + 'total': 2, + 'page': 1, + }, + ) + + mock_backend.set_response( + '/users', + 'POST', + { + 'id': 3, + 'name': 'New User', + 'email': 'newuser@example.com', + 'active': True, + 'created_at': '2024-01-01T00:00:00Z', + }, + status_code=201, + ) + + mock_backend.set_response( + '/users/1', + 'GET', + { + 'id': 1, + 'name': 'John Doe', + 'email': 'john@example.com', + 'active': True, + 'created_at': '2023-01-01T00:00:00Z', + 'orders': [ + { + 'order_id': 'ORD001', + 'order_date': '2024-01-01', + 'order_total': 99.99, + }, + { + 'order_id': 'ORD002', + 'order_date': '2024-01-02', + 'order_total': 149.99, + }, + ], + }, + ) + + # Orders endpoint with field mapping + mock_backend.set_response( + '/users/1/orders', + 'GET', + { + 'orders': [ + { + 'order_id': 'ORD001', + 'order_date': '2024-01-01T10:00:00Z', + 'order_total': 99.99, + 'customer_info': {'name': 'John Doe', 'email': 'john@example.com'}, + 'items': [{'name': 'Product A', 'price': 99.99}], + }, + { + 'order_id': 'ORD002', + 'order_date': '2024-01-02T15:30:00Z', + 'order_total': 149.99, + 'customer_info': {'name': 'John Doe', 'email': 'john@example.com'}, + 'items': [{'name': 'Product B', 'price': 149.99}], + }, + ], + 'total_orders': 2, + }, + ) + + # Error responses + mock_backend.set_response( + '/users/999', + 'GET', + {'error': 'User not found', 'code': 'USER_NOT_FOUND'}, + status_code=404, + ) + + return mock_backend + + +@pytest.fixture +def mock_httpx_with_backend(configured_mock_backend): + """Mock httpx client that uses the configured backend.""" + + async def mock_request(method, url, **kwargs): + headers = kwargs.pop('headers', {}) + return configured_mock_backend.create_mock_response( + method, url, headers, **kwargs + ) + + with 
patch('httpx.AsyncClient') as mock_client_class: + mock_client = MagicMock() + mock_client.request.side_effect = mock_request + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client_class.return_value = mock_client + yield mock_client, configured_mock_backend + + +class TestFullIntegrationWithMockBackend: + """Full integration tests with mock backend services.""" + + @pytest.mark.asyncio + async def test_bearer_auth_integration(self, api_proxy, mock_httpx_with_backend): + """Test full integration with Bearer authentication.""" + mock_client, mock_backend = mock_httpx_with_backend + + # Process request through API proxy + response = await api_proxy.process_request( + service_id='yaml-test-service', + api_id='yaml-api', + api_version='v1', + method='POST', + path='/test', + query_params={'limit': '10'}, + headers={'User-Agent': 'integration-test'}, + body={'filter': 'active'}, + ) + + # Verify response structure + assert response.meta['status'] == 'success' + assert 'trace' in response.meta + assert response.data is not None + + # Verify authentication was applied + assert len(mock_backend.request_log) > 0 + request = mock_backend.request_log[0] + assert 'Authorization' in request['headers'] + assert request['headers']['Authorization'] == 'Bearer yaml-test-token' + + # Verify additional headers were added + assert 'X-YAML-Test' in request['headers'] + assert request['headers']['X-YAML-Test'] == 'true' + + @pytest.mark.asyncio + async def test_get_users_with_bearer_auth(self, api_proxy, mock_httpx_with_backend): + """Test GET users endpoint with Bearer authentication.""" + mock_client, mock_backend = mock_httpx_with_backend + + # Configure mock backend for this specific test + mock_backend.base_url = 'https://api.yaml-test.com' + + # Set up the specific response for the path that will be called + mock_backend.set_response( + '/yaml/test', + 'GET', + { + 'users': [ + {'id': 1, 'name': 'John Doe', 'email': 'john@example.com'}, + {'id': 2, 'name': 'Jane Smith', 'email': 'jane@example.com'}, + ], + 'total': 2, + }, + ) + + response = await api_proxy.process_request( + service_id='yaml-test-service', + api_id='yaml-api', + api_version='v1', + method='POST', # Client always uses POST + path='/users', + query_params={'page': '1', 'limit': '10'}, + headers={'User-Agent': 'test-client'}, + body={}, + ) + + # Verify successful response + assert response.meta['status'] == 'success' + assert response.data['users'] is not None + assert len(response.data['users']) == 2 + + # Verify request was logged with correct authentication + request = mock_backend.request_log[-1] + assert request['method'] == 'GET' # Backend method from config + assert 'Authorization' in request['headers'] + assert request['headers']['Authorization'] == 'Bearer yaml-test-token' + + @pytest.mark.asyncio + async def test_create_user_with_auth_headers( + self, api_proxy, mock_httpx_with_backend + ): + """Test POST create user with authentication and custom headers.""" + mock_client, mock_backend = mock_httpx_with_backend + mock_backend.base_url = 'https://api.yaml-test.com' + + # Set up the specific response for the path that will be called + mock_backend.set_response( + '/yaml/test', + 'GET', + { + 'id': 3, + 'name': 'New User', + 'email': 'testuser@example.com', + 'created_at': '2024-01-01T00:00:00Z', + }, + ) + + user_data = {'name': 'Test User', 'email': 'testuser@example.com'} + + response = await api_proxy.process_request( + service_id='yaml-test-service', 
+ api_id='yaml-api', + api_version='v1', + method='POST', + path='/users', + headers={'Content-Type': 'application/json'}, + body=user_data, + ) + + # Verify response + assert response.meta['status'] == 'success' + assert response.data['id'] == 3 + assert response.data['name'] == 'New User' + + # Verify request details + request = mock_backend.request_log[-1] + assert request['method'] == 'GET' # From YAML config + # Note: GET requests typically don't have bodies, so we verify the auth instead + assert 'Authorization' in request['headers'] + assert request['headers']['Authorization'] == 'Bearer yaml-test-token' + + @pytest.mark.asyncio + async def test_output_mapping_integration( + self, sample_service_definition, mock_httpx_with_backend + ): + """Test output field mapping with mock backend.""" + from api_services_module.core.proxy import ApiProxy + from api_services_module.config.registry import ServiceRegistry + + mock_client, mock_backend = mock_httpx_with_backend + mock_backend.base_url = 'https://api.test-service.com' + + # Set up the response with data that will be mapped + mock_backend.set_response( + '/users/{id}/orders', + 'GET', + { + 'order_id': 'ORD001', + 'order_date': '2024-01-01T10:00:00Z', + 'customer': {'name': 'John Doe'}, + }, + ) + + # Create service registry with our test service + registry = ServiceRegistry() + registry.register_service(sample_service_definition) + + # Create API proxy + proxy = ApiProxy(registry) + + response = await proxy.process_request( + service_id='test-service', + api_id='get-user-orders', + api_version='v1', + method='POST', + path='/users/1/orders', + headers={'Accept': 'application/json'}, + body={}, + ) + + # Verify response structure + assert response.meta['status'] == 'success' + + # Verify output mapping was applied + mapped_data = response.data + assert 'id' in mapped_data # Mapped from order_id + assert 'created_at' in mapped_data # Mapped from order_date + assert 'customer_name' in mapped_data # Mapped from customer.name + + # Verify authentication + request = mock_backend.request_log[-1] + assert 'Authorization' in request['headers'] + assert request['headers']['Authorization'] == 'Bearer test-bearer-token-123' + + @pytest.mark.asyncio + async def test_error_handling_integration(self, api_proxy, mock_httpx_with_backend): + """Test error handling with 500 response from backend.""" + mock_client, mock_backend = mock_httpx_with_backend + mock_backend.base_url = 'https://api.yaml-test.com' + + # Configure 500 response to trigger pipeline error + mock_backend.set_response( + '/yaml/test', 'GET', {'error': 'Server Error'}, status_code=500 + ) + + response = await api_proxy.process_request( + service_id='yaml-test-service', + api_id='yaml-api', + api_version='v1', + method='POST', + path='/users/999', # This path is ignored/mapped to /yaml/test by the API config + headers={'Accept': 'application/json'}, + body={}, + ) + + # Should return error response + assert response.meta['status'] in [ + 'pipeline_error', + 'error', + 'api_pipeline_error', + ] + assert 'trace' in response.meta + + @pytest.mark.asyncio + async def test_multiple_auth_types_integration(self, mock_httpx_with_backend): + """Test integration with different authentication types.""" + from api_services_module.config.registry import ServiceRegistry + from api_services_module.core.proxy import ApiProxy + from api_services_module.models.service import ( + ServiceDefinition, + AuthConfig, + ApiConfig, + ) + + mock_client, mock_backend = mock_httpx_with_backend + + # Create services 
with different auth types + services = [] + + # Bearer auth service + bearer_service = ServiceDefinition( + id='bearer-service', + base_url='https://api.bearer-test.com', + auth=AuthConfig( + id='bearer-auth', type=AuthType.BEARER, token='bearer-token-123' + ), + apis=[ + ApiConfig( + id='test-api', + path='/test', + method=HttpMethod.GET, + backend_path='/test', + ) + ], + ) + services.append(bearer_service) + + # Basic auth service + basic_service = ServiceDefinition( + id='basic-service', + base_url='https://api.basic-test.com', + auth=AuthConfig( + id='basic-auth', + type=AuthType.BASIC, + username='testuser', + password='testpass', + ), + apis=[ + ApiConfig( + id='test-api', + path='/test', + backend_path='/test', + method=HttpMethod.GET, + ) + ], + ) + services.append(basic_service) + + # API Key service + apikey_service = ServiceDefinition( + id='apikey-service', + base_url='https://api.apikey-test.com', + auth=AuthConfig( + id='apikey-auth', + type=AuthType.API_KEY, + api_key='apikey-123', + api_key_header='X-API-Key', + ), + apis=[ + ApiConfig( + id='test-api', + path='/test', + backend_path='/test', + method=HttpMethod.GET, + ) + ], + ) + services.append(apikey_service) + + # Create registry and proxy + registry = ServiceRegistry() + for service in services: + registry.register_service(service) + + proxy = ApiProxy(registry) + + # Test each auth type + for service in services: + mock_backend.base_url = service.base_url + + response = await proxy.process_request( + service_id=service.id, + api_id='test-api', + api_version='v1', + method='POST', + path='/test', + headers={'Accept': 'application/json'}, + body={}, + ) + + assert response.meta['status'] == 'success' + + # Verify correct authentication header + request = mock_backend.request_log[-1] + + if service.auth.type == AuthType.BEARER: + assert request['headers']['Authorization'] == 'Bearer bearer-token-123' + elif service.auth.type == AuthType.BASIC: + import base64 + + expected = base64.b64encode('testuser:testpass'.encode()).decode() + assert request['headers']['Authorization'] == f'Basic {expected}' + elif service.auth.type == AuthType.API_KEY: + assert request['headers']['X-API-Key'] == 'apikey-123' + + def test_pipeline_execution_trace(self, api_proxy, mock_httpx_with_backend): + """Test that pipeline execution trace is properly recorded.""" + mock_client, mock_backend = mock_httpx_with_backend + + # Use asyncio.run for this test since it's not marked as async + async def run_test(): + response = await api_proxy.process_request( + service_id='yaml-test-service', + api_id='yaml-api', + api_version='v1', + method='POST', + path='/test', + headers={'User-Agent': 'trace-test'}, + body={'test': 'trace'}, + ) + + # Verify trace contains expected stages + trace = response.meta['trace'] + assert len(trace) > 0 + + # Check for key pipeline stages in trace + trace_text = ' '.join(trace) + assert 'auth_pipeline' in trace_text + assert ( + 'bearer_authenticator' in trace_text or 'Authentication' in trace_text + ) + assert 'request_sender' in trace_text or 'Request' in trace_text + + return response + + response = asyncio.run(run_test()) + assert response.meta['status'] == 'success' + + @pytest.mark.asyncio + async def test_concurrent_requests_integration( + self, api_proxy, mock_httpx_with_backend + ): + """Test handling multiple concurrent requests.""" + mock_client, mock_backend = mock_httpx_with_backend + mock_backend.base_url = 'https://api.yaml-test.com' + + # Create multiple concurrent requests + tasks = [] + for i in 
range(5): + task = api_proxy.process_request( + service_id='yaml-test-service', + api_id='yaml-api', + api_version='v1', + method='POST', + path=f'/users/{i}', + headers={'User-Agent': f'concurrent-test-{i}'}, + body={'user_id': i}, + ) + tasks.append(task) + + # Execute all requests concurrently + responses = await asyncio.gather(*tasks) + + # Verify all requests succeeded + for i, response in enumerate(responses): + assert response.meta['status'] == 'success' + + # Verify all requests were logged + assert len(mock_backend.request_log) >= 5 + + # Verify each request had proper authentication + for request in mock_backend.request_log[-5:]: + assert 'Authorization' in request['headers'] + assert request['headers']['Authorization'] == 'Bearer yaml-test-token' + + +class TestFastAPIIntegration: + """Test integration with FastAPI test client.""" + + def test_fastapi_router_integration(self, test_client, mock_httpx_with_backend): + """Test the FastAPI router with mock backend.""" + mock_client, mock_backend = mock_httpx_with_backend + mock_backend.base_url = 'https://api.yaml-test.com' + + # Test services list + response = test_client.get('/v1/api-services') + assert response.status_code == 200 + data = response.json() + assert 'services' in data['data'] + + # Test specific service info (use a service that exists in real configs) + response = test_client.get('/v1/api-services/crm-service') + assert response.status_code == 200 + data = response.json() + assert data['data']['service_id'] == 'crm-service' + + def test_proxy_endpoint_integration(self, test_client, mock_httpx_with_backend): + """Test the main proxy endpoint with FastAPI client.""" + mock_client, mock_backend = mock_httpx_with_backend + mock_backend.base_url = 'https://api.crm-system.com' + + # Set up mock response for CRM service + mock_backend.set_response( + '/customers', + 'GET', + {'customers': [{'id': 1, 'name': 'Test Customer'}], 'total': 1}, + ) + + # Test proxy request (use real service from configs) + # Using main path-based route (alias routes have been removed) + response = test_client.post( + '/v1/api-services/crm-service/apis/v1/customers', + json={'test': 'data'}, + headers={'Content-Type': 'application/json'}, + ) + + assert response.status_code == 200 + data = response.json() + assert data['meta']['status'] == 'success' + assert 'trace' in data['meta'] + assert data['data'] is not None + + # Verify backend received the request + assert len(mock_backend.request_log) > 0 + request = mock_backend.request_log[-1] + assert 'Authorization' in request['headers'] + + +# Performance and load testing fixtures +@pytest.fixture +def performance_mock_backend(): + """Mock backend optimized for performance testing.""" + backend = MockBackendService() + + # Set up fast responses + for i in range(100): + backend.set_response( + f'/users/{i}', + 'GET', + {'id': i, 'name': f'User {i}', 'email': f'user{i}@example.com'}, + ) + + return backend + + +class TestPerformanceIntegration: + """Performance and load testing with mock backend.""" + + @pytest.mark.asyncio + async def test_high_throughput_requests(self, api_proxy, performance_mock_backend): + """Test handling high throughput requests.""" + with patch('httpx.AsyncClient') as mock_client_class: + from unittest.mock import MagicMock, AsyncMock + + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + + async def async_side_effect(method, url, **kwargs): + headers = kwargs.pop('headers', {}) + 
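+                # MagicMock has no async support by itself, but assigning an
+                # async function as side_effect makes request() return a
+                # coroutine, so the proxy's `await client.request(...)` resolves
+                # to the canned backend response. Headers are popped from kwargs
+                # above so they are not passed twice to create_mock_response.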
return performance_mock_backend.create_mock_response( + method, url, headers, **kwargs + ) + + mock_client.request.side_effect = async_side_effect + + mock_client_class.return_value = mock_client + + # Create many concurrent requests + tasks = [] + for i in range(50): # Reduced for test performance + task = api_proxy.process_request( + service_id='yaml-test-service', + api_id='yaml-api', + api_version='v1', + method='POST', + path=f'/users/{i}', + headers={'User-Agent': 'performance-test'}, + body={'user_id': i}, + ) + tasks.append(task) + + # Measure execution time + import time + + start_time = time.time() + responses = await asyncio.gather(*tasks) + end_time = time.time() + + # Verify all requests succeeded + success_count = sum(1 for r in responses if r.meta['status'] == 'success') + assert success_count == 50 + + # Basic performance assertion (should complete in reasonable time) + execution_time = end_time - start_time + assert execution_time < 10.0 # Should complete within 10 seconds + + print(f'Processed 50 requests in {execution_time:.2f} seconds') + print(f'Average: {execution_time/50*1000:.2f}ms per request') diff --git a/wavefront/server/modules/api_services_module/tests/test_service_deletion.py b/wavefront/server/modules/api_services_module/tests/test_service_deletion.py new file mode 100644 index 00000000..1d040819 --- /dev/null +++ b/wavefront/server/modules/api_services_module/tests/test_service_deletion.py @@ -0,0 +1,175 @@ +import sys + +# Add paths to sys.path +import os +import asyncio +import logging +from unittest.mock import MagicMock, AsyncMock +from fastapi import FastAPI +from common_module.response_formatter import ResponseFormatter +from api_services_module.core.router import ProxyRouter +from api_services_module.core.proxy import ApiProxy +from api_services_module.config.registry import ServiceRegistry +from api_services_module.models.service import ( + ServiceDefinition, + AuthConfig, + ApiConfig, + AuthType, + HttpMethod, +) + + +# Mock redis module before imports +def create_module_mock(): + """Create a MagicMock configured to support nested module imports.""" + mock = MagicMock() + mock.__path__ = [] # Required for nested package imports + return mock + + +sys.modules['redis'] = MagicMock() +sys.modules['flo_cloud'] = create_module_mock() +sys.modules['flo_cloud.gcp'] = create_module_mock() +sys.modules['flo_cloud.gcp.bigquery'] = create_module_mock() +sys.modules['flo_cloud.cloud_storage'] = create_module_mock() + +sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), '../../common_module')) +) +sys.path.append( + os.path.abspath( + os.path.join(os.path.dirname(__file__), '../../api_services_module') + ) +) +sys.path.append( + os.path.abspath(os.path.join(os.path.dirname(__file__), '../../db_repo_module')) +) + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +async def test_service_deletion_cleanup(): + logger.info('Starting test_service_deletion_cleanup') + + # Setup + api_services_manager = AsyncMock() + service_registry = ServiceRegistry(api_services_manager) + response_formatter = ResponseFormatter() + + # Create a dummy service definition + service_def = ServiceDefinition( + id='test-service', + base_url='http://test.com', + auth=AuthConfig(id='test-auth', type=AuthType.BEARER, token='token'), + apis=[ + ApiConfig( + id='test-api', path='/test', backend_path='/test', method=HttpMethod.GET + ) + ], + ) + + # Register service manually + 
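+    # Registering the definition directly (rather than awaiting
+    # service_registry.load_from_db()) seeds the in-memory registry without a
+    # database, which is what lets this script-style check run standalone via
+    # asyncio.run() below.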
service_registry.register_service(service_def) + + api_change_publisher = AsyncMock() + api_proxy = ApiProxy(service_registry, api_services_manager, api_change_publisher) + + # Initialize Proxy and Router + proxy_router = ProxyRouter( + api_proxy, service_registry, response_formatter, api_services_manager + ) + app = FastAPI() + proxy_router.set_app(app) + + # Force route setup + proxy_router._setup_dynamic_api_routes() + + # Verify initial state + logger.info('Verifying initial state...') + assert ( + service_registry.get_service('test-service') is not None + ), 'Service not in registry' + # Initialize auth manager manually since we didn't go through full init flow + proxy_router.proxy._initialize_auth_manager() + assert ( + proxy_router.proxy.auth_manager.get_auth_handler('test-service') is not None + ), 'Auth handler not found' + + # Cache a pipeline + pipeline = proxy_router.proxy._get_or_build_pipeline( + service_def, service_def.apis[0] + ) + assert ( + proxy_router.proxy.pipeline_cache.get_pipeline('test-service', 'test-api') + is not None + ), 'Pipeline not cached' + + # Verify routes exist + route_names = [r.name for r in proxy_router.router.routes] + assert 'proxy_test-service_test-api_v1' in route_names, 'Route not found in router' + + app_route_names = [r.name for r in app.router.routes] + assert ( + 'proxy_test-service_test-api_v1_app' in app_route_names + ), 'Route not found in app' + + logger.info('Initial state verified.') + + # --- ACTION: Delete Service --- + logger.info('Deleting service...') + await proxy_router.proxy.delete_api_services('test-service') + # Also remove routes (this is what the endpoint does) + proxy_router.remove_service_routes('test-service') + + # --- VERIFICATION --- + logger.info('Verifying cleanup...') + + failures = [] + + # 1. Registry + service = service_registry.get_service('test-service') + if service: + failures.append('FAIL: Service still in registry') + else: + logger.info('PASS: Service removed from registry') + + # 2. Auth Manager + auth = proxy_router.proxy.auth_manager.get_auth_handler('test-service') + if auth: + failures.append('FAIL: Auth handler still exists') + else: + logger.info('PASS: Auth handler removed') + + # 3. Pipeline Cache + pipeline = proxy_router.proxy.pipeline_cache.get_pipeline( + 'test-service', 'test-api' + ) + if pipeline: + failures.append('FAIL: Pipeline still cached') + else: + logger.info('PASS: Pipeline invalidated') + + # 4. Routes + route_names = [r.name for r in proxy_router.router.routes] + if 'proxy_test-service_test-api_v1' in route_names: + failures.append('FAIL: Route still exists in router') + else: + logger.info('PASS: Route removed from router') + + app_route_names = [r.name for r in app.router.routes] + if 'proxy_test-service_test-api_v1_app' in app_route_names: + failures.append('FAIL: Route still exists in app') + else: + logger.info('PASS: Route removed from app') + + if failures: + logger.error('\n'.join(failures)) + sys.exit(1) + else: + logger.info('All checks passed!') + + +if __name__ == '__main__': + asyncio.run(test_service_deletion_cleanup()) diff --git a/wavefront/server/modules/api_services_module/tests/test_simple_integration.py b/wavefront/server/modules/api_services_module/tests/test_simple_integration.py new file mode 100644 index 00000000..7afb2a3f --- /dev/null +++ b/wavefront/server/modules/api_services_module/tests/test_simple_integration.py @@ -0,0 +1,581 @@ +""" +Simple integration test with mock backend for API Services Module. 
+ +This test demonstrates the core integration functionality with a +straightforward mock backend setup. +""" + +import pytest +import asyncio +import json +from unittest.mock import patch, MagicMock, AsyncMock +import base64 + +from api_services_module.models.service import ( + ServiceDefinition, + AuthConfig, + ApiConfig, + AuthType, + HttpMethod, +) +from api_services_module.config.registry import ServiceRegistry +from api_services_module.core.proxy import ApiProxy + + +class SimpleMockBackend: + """Simple mock backend for integration testing.""" + + def __init__(self): + self.requests = [] + self.response_data = { + 'success': True, + 'message': 'Mock backend response', + 'timestamp': '2024-01-01T00:00:00Z', + } + + def create_response(self, method: str, url: str, headers: dict, **kwargs): + """Create a mock HTTP response and log the request.""" + # Log the request for verification + self.requests.append( + { + 'method': method, + 'url': url, + 'headers': dict(headers), + 'body': kwargs.get('json') or kwargs.get('content'), + } + ) + + # Create mock response + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.headers = {'Content-Type': 'application/json'} + mock_response.json.return_value = self.response_data + mock_response.text = json.dumps(self.response_data) + mock_response.raise_for_status.return_value = None + + return mock_response + + +@pytest.fixture +def simple_mock_backend(): + """Create a simple mock backend.""" + return SimpleMockBackend() + + +@pytest.fixture +def bearer_service(): + """Create a service with Bearer authentication.""" + return ServiceDefinition( + id='test-bearer-service', + base_url='https://api.bearer-test.com', + auth=AuthConfig( + id='bearer-auth', + type=AuthType.BEARER, + token='test-bearer-token-123', + additional_headers={'X-Client-ID': 'test-client'}, + ), + apis=[ + ApiConfig( + id='get-data', + path='/data', + backend_path='/data', + method=HttpMethod.GET, + additional_headers={'X-API-Feature': 'data-retrieval'}, + ), + ApiConfig( + id='create-item', + path='/items', + backend_path='/items', + method=HttpMethod.POST, + additional_headers={'X-API-Feature': 'item-creation'}, + ), + ], + ) + + +@pytest.fixture +def basic_service(): + """Create a service with Basic authentication.""" + return ServiceDefinition( + id='test-basic-service', + base_url='https://api.basic-test.com', + auth=AuthConfig( + id='basic-auth', + type=AuthType.BASIC, + username='testuser', + password='testpass123', + ), + apis=[ + ApiConfig( + id='get-users', + path='/users', + backend_path='/users', + method=HttpMethod.GET, + ) + ], + ) + + +@pytest.fixture +def apikey_service(): + """Create a service with API Key authentication.""" + return ServiceDefinition( + id='test-apikey-service', + base_url='https://api.apikey-test.com', + auth=AuthConfig( + id='apikey-auth', + type=AuthType.API_KEY, + api_key='secret-api-key-456', + api_key_header='X-API-Key', + ), + apis=[ + ApiConfig( + id='get-status', + path='/status', + backend_path='/status', + method=HttpMethod.GET, + ) + ], + ) + + +@pytest.fixture +def test_registry(bearer_service, basic_service, apikey_service): + """Create a service registry with test services.""" + registry = ServiceRegistry() + registry.register_service(bearer_service) + registry.register_service(basic_service) + registry.register_service(apikey_service) + return registry + + +@pytest.fixture +def test_proxy(test_registry): + """Create an API proxy with test services.""" + return ApiProxy(test_registry) + + +class 
TestSimpleIntegration: + """Simple integration tests with mock backend.""" + + @pytest.mark.asyncio + async def test_bearer_auth_integration(self, test_proxy, simple_mock_backend): + """Test Bearer authentication integration.""" + + with patch('httpx.AsyncClient') as mock_client_class: + # Setup mock client + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + + async def async_side_effect(method, url, **kwargs): + headers = kwargs.pop('headers', {}) + return simple_mock_backend.create_response( + method, url, headers, **kwargs + ) + + mock_client.request.side_effect = async_side_effect + mock_client_class.return_value = mock_client + + # Make request through proxy + response = await test_proxy.process_request( + service_id='test-bearer-service', + api_id='get-data', + api_version='v1', + method='POST', # Client method + path='/test-path', + query_params={'limit': '10'}, + headers={'User-Agent': 'test-client'}, + body={'filter': 'active'}, + ) + + # Verify response + assert response.meta['status'] == 'success' + assert response.data['success'] is True + assert 'trace' in response.meta + + # Verify request was made with correct authentication + assert len(simple_mock_backend.requests) == 1 + request = simple_mock_backend.requests[0] + + # Check authentication header + assert 'Authorization' in request['headers'] + assert request['headers']['Authorization'] == 'Bearer test-bearer-token-123' + + # Check additional headers + assert request['headers']['X-Client-ID'] == 'test-client' + assert request['headers']['X-API-Feature'] == 'data-retrieval' + + # Check method mapping (POST from client -> GET to backend) + assert request['method'] == 'GET' + + # Check URL construction + assert 'https://api.bearer-test.com/data' in request['url'] + + @pytest.mark.asyncio + async def test_basic_auth_integration(self, test_proxy, simple_mock_backend): + """Test Basic authentication integration.""" + + with patch('httpx.AsyncClient') as mock_client_class: + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + + async def async_side_effect(method, url, **kwargs): + headers = kwargs.pop('headers', {}) + return simple_mock_backend.create_response( + method, url, headers, **kwargs + ) + + mock_client.request.side_effect = async_side_effect + mock_client_class.return_value = mock_client + + response = await test_proxy.process_request( + service_id='test-basic-service', + api_id='get-users', + api_version='v1', + method='POST', + path='/users', + headers={'Accept': 'application/json'}, + body={}, + ) + + # Verify response + assert response.meta['status'] == 'success' + + # Verify Basic auth header + request = simple_mock_backend.requests[0] + assert 'Authorization' in request['headers'] + + # Verify Basic auth encoding + expected_credentials = base64.b64encode( + 'testuser:testpass123'.encode() + ).decode() + assert ( + request['headers']['Authorization'] == f'Basic {expected_credentials}' + ) + + @pytest.mark.asyncio + async def test_api_key_auth_integration(self, test_proxy, simple_mock_backend): + """Test API Key authentication integration.""" + + with patch('httpx.AsyncClient') as mock_client_class: + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + + async def async_side_effect(method, url, **kwargs): + headers = 
kwargs.pop('headers', {}) + return simple_mock_backend.create_response( + method, url, headers, **kwargs + ) + + mock_client.request.side_effect = async_side_effect + mock_client_class.return_value = mock_client + + response = await test_proxy.process_request( + service_id='test-apikey-service', + api_id='get-status', + api_version='v1', + method='POST', + path='/status', + headers={'Accept': 'application/json'}, + body={}, + ) + + # Verify response + assert response.meta['status'] == 'success' + + # Verify API Key header + request = simple_mock_backend.requests[0] + assert 'X-API-Key' in request['headers'] + assert request['headers']['X-API-Key'] == 'secret-api-key-456' + + @pytest.mark.asyncio + async def test_pipeline_execution_trace(self, test_proxy, simple_mock_backend): + """Test that pipeline execution is properly traced.""" + + with patch('httpx.AsyncClient') as mock_client_class: + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + + async def async_side_effect(method, url, **kwargs): + headers = kwargs.pop('headers', {}) + return simple_mock_backend.create_response( + method, url, headers, **kwargs + ) + + mock_client.request.side_effect = async_side_effect + mock_client_class.return_value = mock_client + + response = await test_proxy.process_request( + service_id='test-bearer-service', + api_id='get-data', + api_version='v1', + method='POST', + path='/data', + headers={'User-Agent': 'trace-test'}, + body={'test': 'trace'}, + ) + + # Verify trace contains expected pipeline stages + trace = response.meta['trace'] + assert len(trace) > 0 + + trace_text = ' '.join(trace) + + # Check for key pipeline stages + assert 'proxy' in trace_text.lower() + assert any('auth' in entry.lower() for entry in trace) + assert any('request' in entry.lower() for entry in trace) + + @pytest.mark.asyncio + async def test_error_handling_integration(self, test_proxy): + """Test error handling when backend is unreachable.""" + + with patch('httpx.AsyncClient') as mock_client_class: + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + + # Simulate network error + import httpx + + mock_client.request.side_effect = httpx.ConnectError('Connection failed') + mock_client_class.return_value = mock_client + + response = await test_proxy.process_request( + service_id='test-bearer-service', + api_id='get-data', + api_version='v1', + method='POST', + path='/data', + headers={'User-Agent': 'error-test'}, + body={}, + ) + + # Should return error response, not raise exception + assert response.meta['status'] in [ + 'pipeline_error', + 'error', + 'api_pipeline_error', + ] + assert 'trace' in response.meta + assert ( + 'Connection failed' in response.meta['message'] + or 'Backend request failed' in response.meta['message'] + ) + + @pytest.mark.asyncio + async def test_concurrent_requests(self, test_proxy, simple_mock_backend): + """Test handling multiple concurrent requests.""" + + with patch('httpx.AsyncClient') as mock_client_class: + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + + async def async_side_effect(method, url, **kwargs): + headers = kwargs.pop('headers', {}) + return simple_mock_backend.create_response( + method, url, headers, **kwargs + ) + + mock_client.request.side_effect = async_side_effect + 
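+            # Patching httpx.AsyncClient itself means every request below shares
+            # this one mock client, so simple_mock_backend.requests records all
+            # five concurrent calls for the assertions at the end of the test.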
mock_client_class.return_value = mock_client + + # Create multiple concurrent requests + tasks = [] + for i in range(5): + task = test_proxy.process_request( + service_id='test-bearer-service', + api_id='get-data', + api_version='v1', + method='POST', + path=f'/data/{i}', + headers={'User-Agent': f'concurrent-test-{i}'}, + body={'request_id': i}, + ) + tasks.append(task) + + # Execute all requests concurrently + responses = await asyncio.gather(*tasks) + + # Verify all requests succeeded + for response in responses: + assert response.meta['status'] == 'success' + + # Verify all requests were logged + assert len(simple_mock_backend.requests) == 5 + + # Verify each request had proper authentication + for request in simple_mock_backend.requests: + assert ( + request['headers']['Authorization'] + == 'Bearer test-bearer-token-123' + ) + + def test_service_registry_integration(self, test_registry): + """Test service registry functionality.""" + # Verify services are registered + assert len(test_registry.get_service_ids()) == 3 + + # Test service retrieval + bearer_service = test_registry.get_service('test-bearer-service') + assert bearer_service is not None + assert bearer_service.auth.type == AuthType.BEARER + + # Test API retrieval + api = bearer_service.get_api_by_id('get-data') + assert api is not None + assert api.method == HttpMethod.GET + + # Test validation + for service_id in test_registry.get_service_ids(): + assert test_registry.validate_service(service_id) + + def test_proxy_health_check(self, test_proxy): + """Test proxy health check functionality.""" + health = test_proxy.health_check() + + assert health['status'] == 'healthy' + assert health['services_count'] == 3 + assert health['apis_count'] == 4 # Total APIs across all services + assert 'bearer' in health['auth_types_supported'] + assert 'basic' in health['auth_types_supported'] + assert 'api_key' in health['auth_types_supported'] + + +class TestServiceConfiguration: + """Test service configuration and validation.""" + + def test_bearer_service_config(self, bearer_service): + """Test Bearer service configuration.""" + assert bearer_service.id == 'test-bearer-service' + assert bearer_service.auth.type == AuthType.BEARER + assert bearer_service.auth.token == 'test-bearer-token-123' + assert len(bearer_service.apis) == 2 + + def test_basic_service_config(self, basic_service): + """Test Basic service configuration.""" + assert basic_service.id == 'test-basic-service' + assert basic_service.auth.type == AuthType.BASIC + assert basic_service.auth.username == 'testuser' + assert basic_service.auth.password == 'testpass123' + + def test_apikey_service_config(self, apikey_service): + """Test API Key service configuration.""" + assert apikey_service.id == 'test-apikey-service' + assert apikey_service.auth.type == AuthType.API_KEY + assert apikey_service.auth.api_key == 'secret-api-key-456' + assert apikey_service.auth.api_key_header == 'X-API-Key' + + +class TestPipelineComponents: + """Test individual pipeline components.""" + + @pytest.mark.asyncio + async def test_auth_pipeline_execution(self, test_proxy, simple_mock_backend): + """Test authentication pipeline execution.""" + + with patch('httpx.AsyncClient') as mock_client_class: + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + + async def async_side_effect(method, url, **kwargs): + headers = kwargs.pop('headers', {}) + return simple_mock_backend.create_response( + method, url, headers, 
**kwargs + ) + + mock_client.request.side_effect = async_side_effect + mock_client_class.return_value = mock_client + + # Test each auth type + test_cases = [ + ('test-bearer-service', 'get-data', 'Bearer test-bearer-token-123'), + ( + 'test-apikey-service', + 'get-status', + None, + ), # API key uses different header + ] + + for service_id, api_id, expected_auth in test_cases: + simple_mock_backend.requests.clear() + + response = await test_proxy.process_request( + service_id=service_id, + api_id=api_id, + api_version='v1', + method='POST', + path='/test', + headers={'User-Agent': 'auth-test'}, + body={}, + ) + + assert response.meta['status'] == 'success' + + request = simple_mock_backend.requests[0] + if expected_auth: + assert request['headers']['Authorization'] == expected_auth + else: + # API key case + assert 'X-API-Key' in request['headers'] + + +# Performance test +class TestPerformance: + """Basic performance tests.""" + + @pytest.mark.asyncio + async def test_throughput(self, test_proxy, simple_mock_backend): + """Test basic throughput with mock backend.""" + + with patch('httpx.AsyncClient') as mock_client_class: + mock_client = MagicMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + + async def async_side_effect(method, url, **kwargs): + headers = kwargs.pop('headers', {}) + return simple_mock_backend.create_response( + method, url, headers, **kwargs + ) + + mock_client.request.side_effect = async_side_effect + mock_client_class.return_value = mock_client + + # Measure time for batch of requests + import time + + start_time = time.time() + + tasks = [] + for i in range(20): # Smaller batch for test performance + task = test_proxy.process_request( + service_id='test-bearer-service', + api_id='get-data', + api_version='v1', + method='POST', + path=f'/perf-test/{i}', + headers={'User-Agent': 'perf-test'}, + body={'test_id': i}, + ) + tasks.append(task) + + responses = await asyncio.gather(*tasks) + end_time = time.time() + + # Verify all succeeded + success_count = sum(1 for r in responses if r.meta['status'] == 'success') + assert success_count == 20 + + # Basic performance check + execution_time = end_time - start_time + assert execution_time < 5.0 # Should complete within 5 seconds + + print(f'Processed 20 requests in {execution_time:.2f} seconds') + print(f'Average: {execution_time/20*1000:.2f}ms per request') diff --git a/wavefront/server/modules/auth_module/auth_module/auth_container.py b/wavefront/server/modules/auth_module/auth_module/auth_container.py new file mode 100644 index 00000000..b277e409 --- /dev/null +++ b/wavefront/server/modules/auth_module/auth_module/auth_container.py @@ -0,0 +1,88 @@ +from auth_module.services.client_token_service import ClientTokenService +from auth_module.services.outlook_service import OutlookService +from auth_module.services.superset_service import SupersetService +from auth_module.services.token_service import TokenService +from common_module.feature.feature_flag import is_feature_enabled +from common_module.feature.feature_flag import SUPERSET_FLAG +from db_repo_module.models.auth_secrets import AuthSecrets +from db_repo_module.models.resource import Resource +from db_repo_module.models.role import Role +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from dependency_injector import containers +from dependency_injector import providers +from flo_cloud.kms import FloKmsService + + +class 
AuthContainer(containers.DeclarativeContainer): + config = providers.Configuration(ini_files=['config.ini']) + + db_client = providers.Dependency() + cache_manager = providers.Dependency() + + resource_repository = providers.Singleton( + SQLAlchemyRepository[Resource], + model=Resource, + db_client=db_client, + ) + + role_repository = providers.Singleton( + SQLAlchemyRepository[Role], + model=Role, + db_client=db_client, + ) + + auth_secrets_repository = providers.Singleton( + SQLAlchemyRepository[AuthSecrets], + model=AuthSecrets, + db_client=db_client, + ) + + kms_service = providers.Selector( + config.jwt_token.enable_cloud_kms, + true=providers.Singleton( + FloKmsService, cloud_provider=config.cloud_config.cloud_provider + ), + false=providers.Object(None), # No KMS service if cloud KMS is not enabled + ) + + token_service = providers.Singleton( + TokenService, + private_key=config.jwt_token.private_key, + public_key=config.jwt_token.public_key, + kms_service=kms_service, + token_expiry=config.jwt_token.token_expiry, + temporary_token_expiry=config.jwt_token.temporary_token_expiry, + app_env=config.env_config.app_env, + issuer=config.jwt_token.issuer, + audience=config.jwt_token.audience, + ) + + client_token_service = providers.Singleton( + ClientTokenService, + private_key_pem=config.app_config.client_secret, + client_id=config.app_config.client_id, + product_id=config.app_config.product_id, + ) + + if is_feature_enabled(SUPERSET_FLAG): + superset_service = providers.Singleton( + SupersetService, + url=config.superset.url, + username=config.superset.username, + password=config.superset.password, + cache_manager=cache_manager, + ) + + active_subscriptions = providers.Singleton(dict) + + outlook_service = providers.Singleton( + OutlookService, + client_id=config.outlook.client_id, + client_secret=config.outlook.client_secret, + tenant_id=config.outlook.tenant_id, + email_id=config.outlook.email_id, + authority=config.outlook.authority, + webhook_url=config.outlook.webhook_url, + active_subscriptions=active_subscriptions, + cache_manager=cache_manager, + ) diff --git a/wavefront/server/modules/auth_module/auth_module/controllers/hmac_controller.py b/wavefront/server/modules/auth_module/auth_module/controllers/hmac_controller.py new file mode 100644 index 00000000..e4916abd --- /dev/null +++ b/wavefront/server/modules/auth_module/auth_module/controllers/hmac_controller.py @@ -0,0 +1,48 @@ +import secrets +import uuid + +from auth_module.auth_container import AuthContainer +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from db_repo_module.models.auth_secrets import AuthSecrets +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import Depends +from fastapi import status +from fastapi.responses import JSONResponse +from fastapi.routing import APIRouter + +hmac_router = APIRouter() + + +@hmac_router.post('/v1/developer/secrets') +@inject +async def generate_hmac_secret( + auth_secrets_repository: SQLAlchemyRepository[AuthSecrets] = Depends( + Provide[AuthContainer.auth_secrets_repository] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """Generate a new HMAC client key and secret pair.""" + # Generate cryptographically secure random values + client_key = f'hmac_{uuid.uuid4().hex[:16]}' + client_secret = 
secrets.token_hex(32) # 64 character hex string + + # Store in database + auth_secret = await auth_secrets_repository.create( + client_key=client_key, client_secret=client_secret + ) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'client_key': auth_secret.client_key, + 'client_secret': auth_secret.client_secret, + 'created_at': auth_secret.created_at.isoformat(), + } + ), + ) diff --git a/wavefront/server/modules/auth_module/auth_module/controllers/outlook_controller.py b/wavefront/server/modules/auth_module/auth_module/controllers/outlook_controller.py new file mode 100644 index 00000000..53ca49dc --- /dev/null +++ b/wavefront/server/modules/auth_module/auth_module/controllers/outlook_controller.py @@ -0,0 +1,225 @@ +from typing import Optional + +from auth_module.auth_container import AuthContainer +from auth_module.services.outlook_service import OutlookService +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from db_repo_module.models.knowledge_bases import KnowledgeBase +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import Depends +from fastapi import HTTPException +from fastapi import Request +from fastapi import status +from fastapi.responses import JSONResponse +from fastapi.responses import PlainTextResponse +from fastapi.routing import APIRouter +from knowledge_base_module.knowledge_base_container import KnowledgeBaseContainer +from flo_cloud.message_queue import MessageQueueManager +from flo_cloud.cloud_storage import CloudStorageManager +from pydantic import BaseModel +from pydantic import RootModel + +subscription_controller = APIRouter() + + +# Models +class SubscriptionRequest(BaseModel): + user_email: str + + +class ResourceData(RootModel): + root: dict + + +class Notification(BaseModel): + subscriptionId: str + resourceData: ResourceData | None = None + + +class WebhookNotificationPayload(BaseModel): + value: list[Notification] + + +def serialize_subscription(subscription): + return { + 'id': subscription.id, + 'application_id': subscription.application_id, + 'change_type': subscription.change_type, + 'client_state': subscription.client_state, + 'creator_id': subscription.creator_id, + 'notification_url': subscription.notification_url, + 'resource': subscription.resource, + } + + +# Endpoint to receive webhook notifications +@subscription_controller.post('/v1/data-sources/outlook/webhook/email_received') +@inject +async def receive_notification( + request: Request, + payload: Optional[WebhookNotificationPayload] = None, + service: OutlookService = Depends(Provide[AuthContainer.outlook_service]), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + message_queue: MessageQueueManager = Depends( + Provide[KnowledgeBaseContainer.message_queue] + ), + cloud_storage: CloudStorageManager = Depends( + Provide[KnowledgeBaseContainer.cloud_storage] + ), + knowledge_base_repository: SQLAlchemyRepository[KnowledgeBase] = Depends( + Provide[KnowledgeBaseContainer.knowledge_base_repository] + ), + config=Depends(Provide[KnowledgeBaseContainer.config]), +): + # Check for validation token in query parameters + validation_token = request.query_params.get('validationToken') + if validation_token: + return PlainTextResponse(content=validation_token) 
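+    # Microsoft Graph validates a new subscription by calling the notification
+    # URL with a `validationToken` query parameter and expects the token echoed
+    # back as a plain-text 200 response within a few seconds; notifications are
+    # only delivered once this handshake succeeds.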
+    # Process the notification payload
+    try:
+        existing_kb = await knowledge_base_repository.find_one(name='email')
+        if not existing_kb:
+            return JSONResponse(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                content=response_formatter.buildErrorResponse(
+                    'Knowledge Base with the name email does not exist'
+                ),
+            )
+        notifications = payload.value
+        for notification in notifications:
+            subscription_id = notification.subscriptionId
+            result = await service.view_subscription()
+            # Compare against the full set of active subscription ids; a
+            # substring check on a single id string would accept partial matches.
+            active_subs = [sub.id for sub in result.value]
+            if subscription_id in active_subs:
+                # This is a valid notification
+                resource_data = (
+                    notification.resourceData.root if notification.resourceData else {}
+                )
+                user_email = result.value[0].resource.split('/')[2]
+
+                if resource_data.get('@odata.type') == '#Microsoft.Graph.Message':
+                    content = await service.get_email_details(
+                        user_email, message_queue, cloud_storage, config, existing_kb
+                    )
+                    if content:
+                        return JSONResponse(
+                            content={'status': 'notification received'}, status_code=202
+                        )
+                    else:
+                        return JSONResponse(
+                            status_code=status.HTTP_400_BAD_REQUEST,
+                            content=response_formatter.buildErrorResponse(
+                                'There is no message present for the user'
+                            ),
+                        )
+    except Exception as e:
+        print(f'Error processing notification: {str(e)}')
+        raise HTTPException(status_code=500, detail='Invalid request')
+
+
+# Endpoint to create a new subscription
+@subscription_controller.post('/v1/data-sources/outlook/subscription')
+@inject
+async def create_subscription(
+    subscription_req: SubscriptionRequest,
+    service: OutlookService = Depends(Provide[AuthContainer.outlook_service]),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+):
+    try:
+        result = await service.create_subscription(subscription_req.user_email)
+        if result:
+            return JSONResponse(
+                status_code=status.HTTP_200_OK,
+                content=response_formatter.buildSuccessResponse(
+                    {
+                        'message': 'Created the subscription',
+                    }
+                ),
+            )
+        else:
+            return JSONResponse(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                content=response_formatter.buildErrorResponse(
+                    f'Subscription already created for the user {subscription_req.user_email}'
+                ),
+            )
+    except Exception as e:
+        print(f'Error creating subscription: {str(e)}')
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f'Failed to create subscription: {str(e)}',
+        )
+
+
+@subscription_controller.delete('/v1/data-sources/outlook/subscription/delete')
+@inject
+async def delete_subscription(
+    subscription_req: SubscriptionRequest,
+    service: OutlookService = Depends(Provide[AuthContainer.outlook_service]),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+):
+    try:
+        deleted_subscriptions = await service.delete_all_subscriptions(
+            subscription_req.user_email
+        )
+        if deleted_subscriptions:
+            return JSONResponse(
+                status_code=status.HTTP_200_OK,
+                content=response_formatter.buildSuccessResponse(
+                    {
+                        'message': f'Subscriptions are deleted for these ids {deleted_subscriptions}',
+                    }
+                ),
+            )
+        else:
+            return JSONResponse(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                content=response_formatter.buildErrorResponse(
+                    'There is no subscription present to delete for the user'
+                ),
+            )
+    except Exception as e:
+        print(f'Error deleting subscriptions: {str(e)}')
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f'Failed to delete subscriptions: {str(e)}',
+        )
Depends( + Provide[CommonContainer.response_formatter] + ), +): + try: + result = await service.view_subscription() + if result.value: + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'These are the subscriptions', + 'data': [serialize_subscription(sub) for sub in result.value], + } + ), + ) + else: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'There is no subscription created for the user' + ), + ) + except Exception as e: + print(f'Error deleting subscriptions: {str(e)}') + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f'Failed to delete subscriptions: {str(e)}', + ) diff --git a/wavefront/server/modules/auth_module/auth_module/controllers/superset_controller.py b/wavefront/server/modules/auth_module/auth_module/controllers/superset_controller.py new file mode 100644 index 00000000..620f82f8 --- /dev/null +++ b/wavefront/server/modules/auth_module/auth_module/controllers/superset_controller.py @@ -0,0 +1,94 @@ +from auth_module.auth_container import AuthContainer +from auth_module.services.superset_service import SupersetService +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from db_repo_module.models.resource import ResourceScope +from db_repo_module.models.role import Role +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from user_management_module.user_container import UserContainer +from user_management_module.services.user_service import UserService +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import Depends +from fastapi import Query +from fastapi import Request +from fastapi import status +from fastapi.responses import JSONResponse +from fastapi.routing import APIRouter + +superset_controller = APIRouter() + + +@inject +async def check_is_admin( + role_id: str, + role_repository: SQLAlchemyRepository[Role] = Depends( + Provide[AuthContainer.role_repository] + ), +) -> bool: + role = await role_repository.find_one(id=role_id) + if not role: + return False + + return role.name == 'admin' + + +@superset_controller.get('/v1/superset/authenticate') +@inject +async def superset_authenticator( + request: Request, + superset_service: SupersetService = Depends( + Provide[AuthContainer.superset_service] + ), + user_service: UserService = Depends(Provide[UserContainer.user_service]), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + filter: str | None = Query(None, alias='$filter'), +): + user_id = request.state.session.user_id + role_id = request.state.session.role_id + dashboards = [] + data_filters = [] + is_admin = await check_is_admin(role_id) + + dashboards = await user_service.get_user_resources( + user_id=user_id, scope=ResourceScope.DASHBOARD + ) + + if not dashboards: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'User does not have access to any dashboard' + ), + ) + if not is_admin: + data_filters = await user_service.get_user_resources( + user_id=user_id, scope=ResourceScope.DATA + ) + + if not is_admin and not data_filters: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'User does not have access to any dashboard' + ), + ) + + if 
+
+    guest_token = superset_service.generate_guest_token(
+        user_id, dashboards, data_filters, filter
+    )
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse({'token': guest_token}),
+    )
diff --git a/wavefront/server/modules/auth_module/auth_module/services/client_token_service.py b/wavefront/server/modules/auth_module/auth_module/services/client_token_service.py
new file mode 100644
index 00000000..a6595c3c
--- /dev/null
+++ b/wavefront/server/modules/auth_module/auth_module/services/client_token_service.py
@@ -0,0 +1,44 @@
+import base64
+import json
+import time
+
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import padding
+
+TOKEN_EXPIRY = 5 * 60  # 5 minutes in seconds
+
+
+class ClientTokenService:
+    def __init__(self, private_key_pem: str, client_id: str, product_id: str):
+        self.client_id = client_id
+        self.product_id = product_id
+        # TODO: consider an alternate way to load the private key
+        private_key_pem = base64.b64decode(private_key_pem).decode('utf-8')
+        self.private_key = serialization.load_pem_private_key(
+            private_key_pem.encode(), password=None
+        )
+
+    def generate_token(self) -> str:
+        payload = {
+            'client_id': self.client_id,
+            'product_id': self.product_id,
+            'timestamp': int(time.time()),
+            'exp': int(time.time() + TOKEN_EXPIRY),
+        }
+
+        payload_str = json.dumps(payload, sort_keys=True)
+        payload_bytes = payload_str.encode()
+
+        signature = self.private_key.sign(
+            payload_bytes,
+            padding.PSS(
+                mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH
+            ),
+            hashes.SHA256(),
+        )
+
+        signature_b64 = base64.urlsafe_b64encode(signature).decode().rstrip('=')
+        payload_b64 = base64.urlsafe_b64encode(payload_bytes).decode().rstrip('=')
+
+        return f'{payload_b64}.{signature_b64}'
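+
+# Minimal verification sketch (assumption: the consumer holds the matching
+# public key; 'public_key' below is illustrative, not part of this module):
+#
+#     payload_b64, signature_b64 = token.split('.')
+#     pad = lambda s: s + '=' * (-len(s) % 4)
+#     payload = base64.urlsafe_b64decode(pad(payload_b64))
+#     signature = base64.urlsafe_b64decode(pad(signature_b64))
+#     public_key.verify(
+#         signature, payload,
+#         padding.PSS(mgf=padding.MGF1(hashes.SHA256()),
+#                     salt_length=padding.PSS.MAX_LENGTH),
+#         hashes.SHA256(),
+#     )
+#     assert json.loads(payload)['exp'] > time.time()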
diff --git a/wavefront/server/modules/auth_module/auth_module/services/outlook_service.py b/wavefront/server/modules/auth_module/auth_module/services/outlook_service.py
new file mode 100644
index 00000000..8bfd6cdb
--- /dev/null
+++ b/wavefront/server/modules/auth_module/auth_module/services/outlook_service.py
@@ -0,0 +1,343 @@
+from datetime import datetime
+import re
+from typing import Dict, List, Optional
+import uuid
+
+from azure.identity.aio import ClientSecretCredential
+from bs4 import BeautifulSoup
+from common_module.log.logger import logger
+from db_repo_module.cache.cache_manager import CacheManager
+from kiota_abstractions.base_request_configuration import RequestConfiguration
+import msal
+from msgraph import GraphServiceClient
+from msgraph.generated.models.subscription import Subscription
+from flo_cloud.message_queue import MessageQueueManager
+from flo_cloud.cloud_storage import CloudStorageManager
+from msgraph.generated.users.item.messages.messages_request_builder import (
+    MessagesRequestBuilder,
+)
+from pydantic import BaseModel
+from pydantic import Field
+
+
+class EmailAddress(BaseModel):
+    name: str
+    email: str
+
+
+class Message(BaseModel):
+    id: str
+    created_date_time: str
+    last_modified_date_time: Optional[str] = None
+    subject: str
+    body: str
+    from_: EmailAddress = Field(alias='from')
+    to_recipients: List[EmailAddress]
+    sent_date_time: Optional[str] = None
+    received_date_time: Optional[str] = None
+    conversation_id: str
+    web_link: str
+
+    class Config:
+        populate_by_name = True
+
+
+class OutlookService:
+    def __init__(
+        self,
+        client_id: str,
+        client_secret: str,
+        tenant_id: str,
+        email_id: str,
+        authority: str,
+        webhook_url: str,
+        active_subscriptions: Dict,
+        cache_manager: CacheManager,
+    ):
+        self.client_id = client_id
+        self.client_secret = client_secret
+        self.tenant_id = tenant_id
+        self.email_id = email_id
+        self.authority = authority
+        self.webhook_url = webhook_url
+        self.active_subscriptions = active_subscriptions
+        self.app = msal.ConfidentialClientApplication(
+            self.client_id,
+            authority=self.authority + self.tenant_id,
+            client_credential=self.client_secret,
+        )
+        self.credential = ClientSecretCredential(
+            tenant_id=self.tenant_id,
+            client_id=self.client_id,
+            client_secret=self.client_secret,
+        )
+        self.cache_manager = cache_manager
+
+    async def create_subscription(self, user_email: str) -> Optional[Subscription]:
+        """Create a change-notification subscription for a specific user."""
+
+        from datetime import timedelta
+
+        expiration_date = datetime.utcnow() + timedelta(days=3)
+        expiration_datetime = expiration_date.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
+        client_state = str(uuid.uuid4())
+        subscription = Subscription(
+            change_type='created,updated',  # Graph also supports 'deleted'
+            notification_url=self.webhook_url,
+            resource=f'/users/{user_email}/messages',
+            expiration_date_time=expiration_datetime,
+            client_state=client_state,
+        )
+        graph_client = GraphServiceClient(
+            self.credential, scopes=['https://graph.microsoft.com/.default']
+        )
+        subscriptions = await graph_client.subscriptions.get()
+        if subscriptions.value:
+            return None
+        response = await graph_client.subscriptions.post(subscription)
+        # Track the new subscription by id; delete_all_subscriptions expects
+        # active_subscriptions to be a dict keyed by subscription id.
+        self.active_subscriptions[response.id] = client_state
+        return response
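+
+    # Lifecycle note (sketch, assuming the webhook controller elsewhere in
+    # this patch handles the callbacks): Microsoft Graph echoes client_state
+    # in every change notification it POSTs to webhook_url, so a receiver can
+    # drop notifications whose client_state does not match an entry tracked
+    # in active_subscriptions.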
+
+    async def get_email_details(
+        self,
+        user_email: str,
+        message_queue: MessageQueueManager,
+        cloud_storage: CloudStorageManager,
+        config: dict,
+        existing_kb=None,
+    ):
+        try:
+            graph_client = GraphServiceClient(
+                self.credential, scopes=['https://graph.microsoft.com/.default']
+            )
+            created_timestamp = self.cache_manager.get_str('created_date_time')
+            all_messages = []
+            if created_timestamp:
+                filter_datetime = datetime.fromisoformat(created_timestamp)
+                formatted_timestamp = filter_datetime.isoformat().replace('+00:00', 'Z')
+                query_params = (
+                    MessagesRequestBuilder.MessagesRequestBuilderGetQueryParameters(
+                        orderby=['createdDateTime desc'],
+                        filter=f'createdDateTime gt {formatted_timestamp}',
+                    )
+                )
+            else:
+                query_params = (
+                    MessagesRequestBuilder.MessagesRequestBuilderGetQueryParameters(
+                        orderby=['createdDateTime desc'], top=50
+                    )
+                )
+            request_configuration = RequestConfiguration(
+                query_parameters=query_params,
+            )
+            response = await graph_client.users.by_user_id(user_email).messages.get(
+                request_configuration=request_configuration
+            )
+            if not response.value:
+                return False
+            latest_created_time = response.value[0].created_date_time.isoformat()
+            created_time = response.value[-1].created_date_time.isoformat()
+            batch_messages = response.value
+            all_messages.append(batch_messages)
+            await self._process_and_publish_emails(
+                batch_messages,
+                user_email,
+                message_queue,
+                cloud_storage,
+                config,
+                existing_kb,
+                latest_created_time,
+                created_timestamp,
+            )
+            while response.odata_next_link:
+                filter_datetime = datetime.fromisoformat(created_time)
+                formatted_timestamp = filter_datetime.isoformat().replace('+00:00', 'Z')
+                query_params = (
+                    MessagesRequestBuilder.MessagesRequestBuilderGetQueryParameters(
+                        orderby=['createdDateTime desc'],
+                        filter=f'createdDateTime lt {formatted_timestamp}',
+                    )
+                )
+                request_configuration = RequestConfiguration(
+                    query_parameters=query_params,
+                )
+                response = await graph_client.users.by_user_id(user_email).messages.get(
+                    request_configuration=request_configuration
+                )
+                if not response.value:
+                    break
+                batch_messages = response.value
+                all_messages.append(batch_messages)
+                await self._process_and_publish_emails(
+                    batch_messages,
+                    user_email,
+                    message_queue,
+                    cloud_storage,
+                    config,
+                    existing_kb,
+                    latest_created_time,
+                    created_timestamp,
+                )
+                latest_created_time = response.value[0].created_date_time.isoformat()
+                created_time = response.value[-1].created_date_time.isoformat()
+            # After processing all batches, update the cache with the latest
+            # created_date_time so the next sync only fetches newer messages.
+            if all_messages:
+                self.cache_manager.add(
+                    'created_date_time',
+                    all_messages[0][0].created_date_time.isoformat(),
+                )
+            return True
+
+        except Exception as e:
+            logger.error(f'Failed to get email details: {str(e)}')
+            return False
+
+    def _build_message_dict(self, batch_messages):
+        messages = []
+        for message in batch_messages:
+            message_data = {
+                'id': message.id,
+                'created_date_time': message.created_date_time.isoformat(),
+                'last_modified_date_time': message.last_modified_date_time.isoformat()
+                if message.last_modified_date_time
+                else None,
+                'subject': message.subject,
+                'body': message.body.content,
+                'from': EmailAddress(
+                    name=message.from_.email_address.name,
+                    email=message.from_.email_address.address,
+                ),
+                'to_recipients': [
+                    EmailAddress(
+                        name=recipient.email_address.name,
+                        email=recipient.email_address.address,
+                    )
+                    for recipient in message.to_recipients
+                ],
+                'sent_date_time': message.sent_date_time.isoformat()
+                if message.sent_date_time
+                else None,
+                'received_date_time': message.received_date_time.isoformat()
+                if message.received_date_time
+                else None,
+                'conversation_id': message.conversation_id,
+                'web_link': message.web_link,
+            }
+            messages.append(Message(**message_data))
+        return messages
+
+    async def _process_and_publish_emails(
+        self,
+        batch_messages,
+        user_email,
+        message_queue: MessageQueueManager,
+        cloud_storage: CloudStorageManager,
+        config: dict,
+        existing_kb,
+        latest_created_time,
+        cached_timestamp,
+    ):
+        messages = self._build_message_dict(batch_messages)
+        for message in messages:
+            # Skip emails the user sent themselves, and (when an incremental
+            # sync timestamp is cached) the boundary message already processed.
+            if message.from_.email == user_email or (
+                cached_timestamp and message.created_date_time == latest_created_time
+            ):
+                continue
+            gcs_file_name = (
+                f'kb_{existing_kb.id}/{message.id}/{message.conversation_id}'
+            )
+            # Get bucket name from config
+            bucket_name = (
+                config['gcp']['gcp_asset_storage_bucket']
+                if config['cloud_config']['cloud_provider'] == 'gcp'
+                else config['aws']['aws_asset_storage_bucket']
+            )
+
+            # Get topic/queue URL from config
+            topic_id = (
+                config['gcp']['email_topic_id']
+                if config['cloud_config']['cloud_provider'] == 'gcp'
+                else config['aws']['queue_url']
+            )
+
+            data = {
+                'bucket': bucket_name,
+                'name': gcs_file_name,
+                'kb_id': str(existing_kb.id),
+                'doc_id': '',
+                'parse_type': 'email',
+                'conversation_id': message.conversation_id,
+                'conversation_content': self.__clean_email_content(message.body),
+            }
+            message_queue.add_message(
+                message_body=data, topic_name_or_queue_url=topic_id
+            )
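+
+    # Example of the payload published per email (values hypothetical): on a
+    # GCP deployment with knowledge base id 42 this enqueues roughly
+    #     {'bucket': '<asset-bucket>', 'name': 'kb_42/<msg-id>/<conv-id>',
+    #      'kb_id': '42', 'doc_id': '', 'parse_type': 'email',
+    #      'conversation_id': '<conv-id>', 'conversation_content': '<text>'}
+    # onto the configured email topic for downstream ingestion.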
+
+    async def delete_all_subscriptions(self, user_email: str) -> List[str]:
+        """
+        Delete all subscriptions associated with a specific user email.
+
+        Args:
+            user_email: The email address of the user whose subscriptions are being deleted
+
+        Returns:
+            List[str]: List of deleted subscription IDs
+        """
+        try:
+            graph_client = GraphServiceClient(
+                self.credential, scopes=['https://graph.microsoft.com/.default']
+            )
+            subscriptions = await graph_client.subscriptions.get()
+            deleted_ids = []
+            for subscription in subscriptions.value:
+                if f'/users/{user_email}/messages' in subscription.resource:
+                    try:
+                        await graph_client.subscriptions.by_subscription_id(
+                            subscription.id
+                        ).delete()
+                        deleted_ids.append(subscription.id)
+                        if subscription.id in self.active_subscriptions:
+                            del self.active_subscriptions[subscription.id]
+
+                    except Exception as e:
+                        logger.error(
+                            f'Failed to delete subscription {subscription.id}: {str(e)}'
+                        )
+
+            logger.info(f'Deleted {len(deleted_ids)} subscriptions for {user_email}')
+            return deleted_ids
+
+        except Exception as e:
+            logger.error(f'Failed to delete subscriptions: {str(e)}')
+            return []
+
+    async def view_subscription(self):
+        graph_client = GraphServiceClient(
+            self.credential, scopes=['https://graph.microsoft.com/.default']
+        )
+        subscriptions = await graph_client.subscriptions.get()
+        return subscriptions
+
+    def __clean_email_content(self, content):
+        """Remove images, links, and other non-text elements from the email content."""
+        soup = BeautifulSoup(content, 'html.parser')
+
+        for img in soup.find_all('img'):
+            img.decompose()
+
+        for a in soup.find_all('a'):
+            a.unwrap()
+        for script in soup(['script', 'style']):
+            script.decompose()
+        cleaned_content = soup.get_text(separator='\n', strip=True)
+
+        cleaned_content = re.sub(r'\n+', '\n', cleaned_content).strip()
+        return cleaned_content
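+
+# Illustrative behaviour of __clean_email_content (based on the BeautifulSoup
+# calls above; the sample input is hypothetical):
+#     '<p>Hi<img src="x"/><a href="y">team</a><script>x()</script></p>'
+# is reduced to 'Hi\nteam': images dropped, anchors unwrapped to their text,
+# script/style removed, and runs of newlines collapsed to one.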
diff --git a/wavefront/server/modules/auth_module/auth_module/services/superset_service.py b/wavefront/server/modules/auth_module/auth_module/services/superset_service.py
new file mode 100644
index 00000000..1ce80ef6
--- /dev/null
+++ b/wavefront/server/modules/auth_module/auth_module/services/superset_service.py
@@ -0,0 +1,132 @@
+from dataclasses import dataclass
+from typing import Dict, List
+
+from common_module.log.logger import logger
+from common_module.utils.odata_parser import fill_odata_query
+from common_module.utils.odata_parser import prepare_odata_filter
+from fastapi import HTTPException
+import requests
+
+
+@dataclass
+class Resource:
+    key: str
+    value: str
+
+
+def generate_rls_policy(
+    filters: List[Resource], odata_query_filter: str
+) -> List[Dict[str, str]]:
+    """
+    Generate an RLS policy from a list of filters.
+
+    Automatically groups filters by unique keys to create appropriate
+    OR/AND conditions.
+
+    :param filters: List of Resource filter objects
+    :param odata_query_filter: Optional OData filter string to merge in
+    :return: RLS policy configuration
+    """
+    # Group filters by their keys
+    if len(filters) == 0 and not odata_query_filter:
+        return []
+    filter_groups: dict[str, list[str]] = {}
+    for filter_obj in filters:
+        if filter_obj.key not in filter_groups:
+            filter_groups[filter_obj.key] = []
+        filter_groups[filter_obj.key].append(filter_obj.value)
+
+    # Generate conditions
+    conditions = []
+    for key, values in filter_groups.items():
+        # If multiple values for a key, use OR
+        if len(values) > 1:
+            or_condition = ' OR '.join([f"{key} = '{value}'" for value in values])
+            conditions.append(f'({or_condition})')
+        else:
+            # Single value, use direct equality
+            conditions.append(f"{key} = '{values[0]}'")
+
+    if odata_query_filter:
+        # If an OData filter is provided, parse it and add to conditions
+        odata_condition, params = prepare_odata_filter(odata_query_filter)
+        odata_query_condition = fill_odata_query(odata_condition, params)
+        conditions.append(odata_query_condition)
+
+    # Combine all conditions with AND
+    full_condition = ' AND '.join(conditions)
+
+    return [{'clause': full_condition}]
+
+
+class SupersetService:
+    def __init__(self, url, username, password, cache_manager):
+        self.url = url
+        self.username = username
+        self.password = password
+        self.cache_manager = cache_manager
+
+    def generate_guest_token(
+        self,
+        user_id: str,
+        dashboards: List[Resource],
+        filters: List[Resource],
+        query_filter: str,
+    ):
+        dashboard_ids = [dashboard.value for dashboard in dashboards]
+        combined_keys = ':'.join(dashboard_ids)
+        cache_key = f'superset:{user_id}:{combined_keys}'
+        cached_access_token = self.cache_manager.get_str(cache_key)
+
+        logger.info('Fetching superset token from cache')
+        if cached_access_token:
+            access_token = cached_access_token
+        else:
+            login_body = {
+                'password': self.password,
+                'provider': 'db',
+                'refresh': True,
+                'username': self.username,
+            }
+
+            login_response = requests.post(
+                f'{self.url}/api/v1/security/login', json=login_body
+            )
+            if login_response.status_code != 200:
+                logger.error(f'Error during superset login {login_response.text}')
+                raise HTTPException(
+                    status_code=login_response.status_code, detail='Login failed'
+                )
+            access_token = login_response.json().get('access_token')
+            logger.info('Saving superset token into cache')
+            self.cache_manager.add(cache_key, access_token, 900)
+
+        resources = [{'type': 'dashboard', 'id': id} for id in dashboard_ids]
+        rls_policy = generate_rls_policy(filters, query_filter)
+        guest_token_body = {
+            'resources': resources,
+            'rls': rls_policy,
+            'user': {
+                'username': '',
+                'first_name': '',
+                'last_name': '',
+            },
+        }
+        headers = {
+            'Content-Type': 'application/json',
+            'Authorization': f'Bearer {access_token}',
+        }
+        guest_token_response = requests.post(
+            f'{self.url}/api/v1/security/guest_token/',
+            json=guest_token_body,
+            headers=headers,
+        )
+        if guest_token_response.status_code != 200:
+            logger.error(
+                f'Error getting superset guest token {guest_token_response.text}'
+            )
+            raise HTTPException(
+                status_code=guest_token_response.status_code,
+                detail='Guest token generation failed',
+            )
+        guest_token = guest_token_response.json().get('token')
+        return guest_token
diff --git a/wavefront/server/modules/auth_module/auth_module/services/token_service.py b/wavefront/server/modules/auth_module/auth_module/services/token_service.py
new file mode 100644
index 00000000..33e94f9d
--- /dev/null
+++ b/wavefront/server/modules/auth_module/auth_module/services/token_service.py
@@
-0,0 +1,139 @@ +import base64 +import json +import jwt +import hashlib + +from datetime import datetime +from datetime import timedelta +from enum import Enum +from typing import Any +from flo_cloud._types import FloKMS + + +class TokenAlgorithms(str, Enum): + RS256 = 'RS256' + PS256 = 'PS256' + ES256 = 'ES256' + ES384 = 'ES384' + ES512 = 'ES512' + RS384 = 'RS384' + RS512 = 'RS512' + PS384 = 'PS384' + PS512 = 'PS512' + + +class TokenService: + def __init__( + self, + private_key: str, + public_key: str, + kms_service: FloKMS, + algorithm: TokenAlgorithms = TokenAlgorithms.PS256, + token_expiry: int = 4 * 60 * 60, # 4 hours in seconds + temporary_token_expiry: int = 10 * 60, # 10 minutes in seconds + app_env: str = 'production', + issuer: str = 'https://floware.rootflo.ai', + audience: str = 'https://floware.rootflo.ai', + ): + self.is_dev = app_env == 'dev' or (kms_service is None) + self.private_key = self._load_key(private_key) + self.public_key = self._load_key(public_key) + self.algorithm = TokenAlgorithms.RS256.value if self.is_dev else algorithm.value + self.token_expiry = int(token_expiry) + self.temporary_token_expiry = int(temporary_token_expiry) + self.kms_service = kms_service + self.issuer = issuer + self.audience = audience + + def _load_key(self, key: str): + key = base64.b64decode(key).decode('ascii') + return key + + def create_token( + self, + sub: str | None = None, + user_id: str | None = None, + role_id: str | None = None, + expiry: int | None = None, + payload: dict[str, Any] | None = None, + is_temporary: bool = False, + ) -> str: + if not is_temporary and (sub is None or user_id is None or role_id is None): + raise ValueError('Required values are missing for creating a token') + + now = datetime.now() + data = { + key: value + for key, value in [ + ('sub', sub), + ('user_id', user_id), + ('role_id', role_id), + ] + if value is not None + } + + expiry_seconds = expiry or ( + self.temporary_token_expiry if is_temporary else self.token_expiry + ) + data['exp'] = int((now + timedelta(seconds=expiry_seconds)).timestamp()) + data['iat'] = int(now.timestamp()) + data['iss'] = self.issuer + data['aud'] = self.audience + + if payload: + data.update(payload) + + if self.is_dev: + return jwt.encode({**data}, self.private_key, algorithm=self.algorithm) + else: + header = {'alg': self.algorithm, 'typ': 'JWT'} + + header_b64 = self._base64url_encode(json.dumps(header).encode()) + payload_b64 = self._base64url_encode(json.dumps(data).encode()) + message = f'{header_b64}.{payload_b64}' + + digest = hashlib.sha256(message.encode()).digest() + + signature = self.kms_service.sign(message=digest) + signature = self._base64url_encode(signature) + + return f'{message}.{signature}' + + def decode_token(self, token: str) -> dict: + if self.is_dev: + decoded = jwt.decode( + token, + self.public_key, + algorithms=[self.algorithm], + issuer=self.issuer, + audience=self.audience, + ) + return decoded + else: + header_b64, payload_b64, signature_b64 = token.split('.') + + message = f'{header_b64}.{payload_b64}' + digest = hashlib.sha256(message.encode()).digest() + signature = self._base64url_decode(signature_b64) + + is_valid = self.kms_service.verify(message=digest, signature=signature) + if not is_valid: + return {} + + public_key_pem = self.kms_service.get_public_key_pem() + + decoded = jwt.decode( + token, + public_key_pem, + algorithms=[self.algorithm], + issuer=self.issuer, + audience=self.audience, + ) + return decoded + + def _base64url_encode(self, data: bytes) -> str: + return 
base64.urlsafe_b64encode(data).rstrip(b'=').decode('utf-8') + + def _base64url_decode(self, data: str) -> bytes: + padding = '=' * (-len(data) % 4) + return base64.urlsafe_b64decode(data + padding) diff --git a/wavefront/server/modules/auth_module/pyproject.toml b/wavefront/server/modules/auth_module/pyproject.toml new file mode 100644 index 00000000..080f6ddb --- /dev/null +++ b/wavefront/server/modules/auth_module/pyproject.toml @@ -0,0 +1,40 @@ +[project] +name = "auth-module" +version = "0.0.1" +description = "Authentication module" +authors = [ + { name = "rootflo engineering", email = "engineering@rootflo.ai" } +] +requires-python = ">=3.11" + +dependencies = [ + "common-module", + "flo-cloud", + "dependency-injector>=4.42.0,<5.0.0", + "msgraph-sdk>=1.5.4,<2.0.0", + "pyjwt[crypto]>=2.9.0", +] + +[tool.uv.sources] +common-module = { workspace = true } +flo-cloud = { workspace = true } + +[dependency-groups] +dev = [ + "pytest>=8.3.3,<9.0.0", + "pytest-asyncio>=0.24.0,<1.0.0", + "asyncpg>=0.30.0,<1.0.0" +] + +[tool.pytest.ini_options] +asyncio_mode = "auto" + +[tool.uv] +package = true + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["auth_module"] diff --git a/wavefront/server/modules/auth_module/tests/conftest.py b/wavefront/server/modules/auth_module/tests/conftest.py new file mode 100644 index 00000000..f661e9f5 --- /dev/null +++ b/wavefront/server/modules/auth_module/tests/conftest.py @@ -0,0 +1,179 @@ +from unittest.mock import Mock +from uuid import uuid4 +import os + +from auth_module.auth_container import AuthContainer +from auth_module.controllers.superset_controller import superset_controller +from common_module.common_container import CommonContainer +from common_module.middleware.request_id_middleware import RequestIdMiddleware +from db_repo_module.database.base import Base +from db_repo_module.db_repo_container import DatabaseModuleContainer +from fastapi import FastAPI +from fastapi.testclient import TestClient +import pytest +from sqlalchemy.ext.asyncio import async_sessionmaker +from sqlalchemy.ext.asyncio import create_async_engine +import testing.postgresql +from user_management_module.authorization.require_auth import RequireAuthMiddleware +from user_management_module.user_container import UserContainer +from db_repo_module.models.datasource import Datasource # noqa: F401 +from db_repo_module.models.dynamic_query_yaml import DynamicQueryYaml # noqa: F401 + +# Enable SUPERSET_FLAG for tests +os.environ['SUPERSET_FLAG'] = 'true' + + +class MockDbClient: + def __init__(self, engine, session_factory): + self._engine = engine + self.session = session_factory + + +@pytest.fixture +async def test_engine(): + with testing.postgresql.Postgresql() as postgresql: + database_url = postgresql.url() + + async_database_url = database_url.replace( + 'postgresql://', 'postgresql+psycopg://' + ) + + engine = create_async_engine(async_database_url) + + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + yield engine + + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + await engine.dispose() + + +@pytest.fixture +async def test_session(test_engine): + async_session = async_sessionmaker(autocommit=False, bind=test_engine) + yield async_session + + +@pytest.fixture +def test_user_id(): + """Fixture to provide a consistent test user ID.""" + return str(uuid4()) + + +@pytest.fixture +def test_session_id(): + """Fixture to provide a consistent 
test session ID.""" + return str(uuid4()) + + +@pytest.fixture +def setup_containers(test_engine, test_session, test_user_id, test_session_id): + db_repo_container = DatabaseModuleContainer() + mock_db_client = MockDbClient(test_engine, test_session) + db_repo_container.db_client.override(mock_db_client) + + common_container = CommonContainer() + cache_manager_mock = Mock() + + # Mock token service + mock_token_service = Mock() + mock_token_service.create_token.return_value = 'mock_token' + mock_token_service.decode_token.return_value = { + 'sub': 'test@example.com', + 'user_id': test_user_id, + 'role_id': 'test_role_id', + 'session_id': test_session_id, + } + mock_token_service.token_expiry = 3600 + mock_token_service.temporary_token_expiry = 600 + + auth_container = AuthContainer( + db_client=db_repo_container.db_client, + cache_manager=cache_manager_mock, + ) + auth_container.token_service.override(mock_token_service) + + # mocking auth container superset_service + mock_superset_service = Mock() + mock_superset_service.generate_guest_token.return_value = 'mock_guest_token' + if hasattr(auth_container, 'superset_service'): + auth_container.superset_service.override(mock_superset_service) + + user_container = UserContainer( + db_client=db_repo_container.db_client, + cache_manager=cache_manager_mock, + ) + # mocking user container cache_manager + cache_manager_mock = Mock() + cache_manager_mock.get_str.return_value = None + user_container.cache_manager.override(cache_manager_mock) + + common_container.wire( + packages=[ + 'user_management_module.controllers', + 'auth_module.controllers', + 'user_management_module.authorization', + ] + ) + auth_container.wire( + packages=[ + 'auth_module.controllers', + 'user_management_module.authorization', + ] + ) + user_container.wire( + packages=[ + 'user_management_module.authorization', + 'auth_module.controllers', + ] + ) + yield auth_container, common_container + auth_container.unwire() + common_container.unwire() + user_container.unwire() + + +@pytest.fixture +def test_client(setup_containers): + app = FastAPI() + app.add_middleware(RequestIdMiddleware) + app.add_middleware(RequireAuthMiddleware) + app.include_router(superset_controller) + return TestClient(app) + + +@pytest.fixture +def mock_auth_functions(monkeypatch): + async def mock_check_is_admin(role_id): + return True + + monkeypatch.setattr( + 'auth_module.controllers.superset_controller.check_is_admin', + mock_check_is_admin, + ) + + +@pytest.fixture +def mock_admin_false_functions(monkeypatch): + async def mock_check_is_not_admin(role_id): + return False + + monkeypatch.setattr( + 'auth_module.controllers.superset_controller.check_is_admin', + mock_check_is_not_admin, + ) + + +@pytest.fixture +def auth_token(setup_containers, test_user_id, test_session_id): + auth_container, _ = setup_containers + token_service = auth_container.token_service() + token = token_service.create_token( + sub='test@example.com', + user_id=test_user_id, + role_id='test_role_id', + session_id=test_session_id, + ) + return token diff --git a/wavefront/server/modules/auth_module/tests/data/idp_metadata.xml b/wavefront/server/modules/auth_module/tests/data/idp_metadata.xml new file mode 100644 index 00000000..577f2c9a --- /dev/null +++ b/wavefront/server/modules/auth_module/tests/data/idp_metadata.xml @@ -0,0 +1,36 @@ + + + + + + + + MIICYDCCAgqgAwIBAgIBADANBgkqhkiG9w0BAQ0FADCBkDELMAkGA1UEBhMCR0Ix + EzARBgNVBAgMClNvbWUtU3RhdGUxDjAMBgNVBAcMBVRvd24xMRIwEAYDVQQKDAlU + 
ZXN0IENvcnAxDzANBgNVBAsMBlRlc3RPdTEVMBMGA1UEAwwMdGVzdC1pZHAuY29t + MSAwHgYJKoZIhvcNAQkBFhF0ZXN0QHRlc3QtaWRwLmNvbTAeFw0yMzAxMDEwMDAw + MDBaFw0yNDAxMDEwMDAwMDBaMIGQMQswCQYDVQQGEwJHQjETMBEGA1UECAwKU29t + ZS1TdGF0ZTEOMBIGA1UEBwwFVG93bjExEjAQBgNVBAoMCVRlc3QgQ29ycDEPMA0G + A1UECwwGVGVzdE91MRUwEwYDVQQDDAx0ZXN0LWlkcC5jb20xIDAeBgkqhkiG9w0B + CQEWEXRlc3RAdGVzdC1pZHAuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKB + gQC7JL6Sy6G/Jz6Zx9M3c9vH3QK+Y8O8p8lX9jS+7C+5FCwB7JM+wVkApZJCHh6j + Q9L5+UZ7F9QK/1t3jF4Y4G4fUVmJ4Y4Qc8l+UJm3jZ3H5oR7A8ZyUGp+4J5H5y7E + 8lW/vX6TqO6z6G8TZw4ZqZwXj5QjhBn1f5BwqwIDAQABMA0GCSqGSIb3DQEBBQUA + A4GBAGkV0yqQjL0LKqpC1wFJz5TSE4Lb4x6Kyh9zDkPUz8qVqPInBZ6LBKoTZHJO + Yw4TF7hxuBm8kPJyK7HyUzGUL8u5tB8kyEVlx9jJQ6DZQ3vR0l4H5F9k3TZ4sjqw + Z4C9xZ9i5eHJ4Q6x1Yj6JhXJysC5xDkBZ6JxyfYYB6tJ + + + + + urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress + + + + diff --git a/wavefront/server/modules/auth_module/tests/data/key_gen.sh b/wavefront/server/modules/auth_module/tests/data/key_gen.sh new file mode 100644 index 00000000..0a9d683b --- /dev/null +++ b/wavefront/server/modules/auth_module/tests/data/key_gen.sh @@ -0,0 +1,4 @@ +# run the below code in terminal on appropriate folder to generate pvt and pub key + +# $ssh-keygen -t rsa -P "" -b 4096 -m PEM -f test_private_key.pem +# $ssh-keygen -e -m PEM -f test_private_key.pem > test_public_key.pem diff --git a/wavefront/server/modules/auth_module/tests/data/test_private_key.pem b/wavefront/server/modules/auth_module/tests/data/test_private_key.pem new file mode 100644 index 00000000..fba64627 --- /dev/null +++ b/wavefront/server/modules/auth_module/tests/data/test_private_key.pem @@ -0,0 +1,51 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIJKgIBAAKCAgEAyvC7Yadh+hgMM7luP6k1cp0K78FR3MzRfNwURbKLsTaavmMK +iyQ8b9n0taY9JVarEbOBEG5XJfNiQ6c8SJkzBwDT+HOkUwaDh33aKqWOUZELVdwb +5GXRqcD21V2Mq7tGT8EjWNlWOL/nSnu1WrSIvPk+3iEtrWy2opNn9E9mXShX1VDy +g+iwcO82l6ts5pN8jy6A7IABJlmfRZOUi55bsknpTwu1gNksTa82Gc3n2M0BIhjr +CYvCHCJyxXf0ZNVlm8UdGh2Vp3+xCx2Omdh1u4qfUA4eaqcYGTYL1inh7a73OIxt +CtaTca8iuScnMbfndUr0fw3XuCp5i+H3qUajQnEhg4payvVqLxopSINNJEbcMSIJ +Y1ITj8gEWZevhGMZJXD0T1rqu2nixiSsfJfx1AXQl7Nq7eTD4x5Q246Rj67KzmXs +6nu9lgAH8EShL6DKpuCkqgapHiGnaOMXhZjXcxBek3FyTXFuWrwaWlt1OyIN7ckm +AFAvNxfQdhpjvdHhTBxmoDIE4PN5e1hbce9Bpi1/q8MQtW3X24FRIZ9IS8muBt7R +7lm59pe3Qhz0irHrlKir2BMFzf8FNAImJA6QPs7663Oj7+bCcvdwiFdD+5TfUaqh +0/JKKlo7r1Xhf3zJucLVmBmw+w4TWEN+Rx9r3UE0u6FqzOpFbNkYxoajKqECAwEA +AQKCAgEAh5opqwvcNO+Cn8FEwLcA5q+20hNb7uzRAdxQk23FV8mDxrmJHwY8eASs +NgB5lQx/TFmkYOVJyCsDuu7xIhVz+jb8bGRhJbURiCR49cwf97KyX1GvNYMvgH1W +saVLzKhbSAlm5AR9T3gzRRq6SHyfbZ4h+u0yh1JACwvyB1ZSsOa3Ietv0OJXm27M +DLZYEw3zyfyp2t2BZy21dhkxxcUvwkcO5WmOgyUaLGJqBP4H8F6zZiQ+UOeY8ZG3 +HOmVXRcWSOqCEqQ4Dm8NTr8WPQhdSs5Ku4RjoKI8QQRTDmx38Yja2mdCqV5NsUX0 +LsHRt7AJsd8+1fLa9M004FpCUVLI3Pix7UcA4c+Mkp/XeEYjwS08grTqbjqcigna +Q4Mk2TdmJkHVeuO49m2rud+MGW35X7AfZfg5EgXeiwEnJrO5LITHwdNohxOdZLt6 +YBVUh28QgINRb5Eumwykh5FKqnpthc4QcCbgpLMJ7AbRJFUyKjm00UjMCXtpobxT +0sRYm+YoD4vcQZ7q81VZaCgj4NprjbUsj8q5LfbBaSVA4O2M4YkQOfvwLvnflYIC +rvDNjFwcSaWt/hMk+ase6Ej/+idyU75u+mHnfVw/LjhMYmyi4+7f1m2pa/cWWjeu +ReBn7d+vF1xtW/PJpPF8yGLgTLIjGDmHsyHfMny7mGpTLCjylK0CggEBAO41pkr0 +pQBglpt5rnz80BmxvxRUe+S6zv+nJIBxZEz4SXyUuK98+7rDHyEq66db6jNkMyp1 +MvaUbsSwKWtKqekz/MmorF4dXwH8MYN0QJe/t5PalRyDToVfpiJgBg81dt81KHUp +RQIyOCvkWateKJioEtHdpn+WesKj0l5H7hAqfMTppUqoB7tsyiEeo6j2Pd9DxLr2 +tV6eS5aIY+BZfIGA++krrPDI+Pl+RXJJxV3Dim7Haza5B+ot3FDIThFm3HnTx7Sr +DZ40mJaW1KXcXZxsdy4CK/ERM10Zne/bIvly5TErNBzDcDlZcBZtLKdP98NwdG/U +U4D5kdEDExA/DrMCggEBANoYxGQd0sFN7w8shGoansbSlMsk1Gj2rXciAlh2vcMP 
+pRcnCVS3cqF48jPkqTez5jNOzkpa+Yp3uozQXtPxOUaRFJMmklTnA1mZQreJrl0s +EnPkq3S4FkTeR/KuRk4jdbnVNKs+e8VoCybdrmL3ql5UnB5O7QfgESzbgMrljclZ +YbaOYvLvME9mWr7AldAKUWEe5SLJ2qVRQyHtyTIO6kdjXPQy22FWnsyS81nqAuf3 +rAZSSKTixq1Y0nbUWxTIRqLLaXKr7qAxhPXZM2dLNtByNuiKcAncaPGXHj/ziSkr +5D6sGenBayyjonWQdD4tRLxWG3fvn3aH83rU3Fqry1sCggEACsX6Y2F1JPM5XIbA +UX7NmpvjXHQFtm6MdhuFFxoJw0FncW74e1dXuzaf7cwXJFaP3QsnFUB/sY89A0BE +X0ndLWLgh+5M/WSuq1RQmPVlBnfxQYqdO9DkeDu/M8bL/1wXqDirP8FCEbtbtwB4 +3btAcscHMt6nbRXNL4hhN0phhfGl+vHmO94lm2BsM9tC451fZv2VpC8MPN9D4+b/ +PWJOszy2J/GFvM4aFI6VbXnQp1D0rdXvD2aG6Rxx3mVwwnC4f/oICi/CSelvDUB9 +FZNF2JE/QqFug1SX0hoafoKWrll37CVcEgD0zmMxIpKFdnuzZO9tFLGxpR+kbKOK +9S0k6QKCAQEAk6E0PzjY0rS4X6MYtuHyHE+rHJ9/kNV5siy5PbyS0xkh/TwVWt0a +oSbHfE0KJoSGDLEJftd6FzrYcj0SE5Ez3gqSzNDNON9vKxVNTgPKnZMZoo1lagq6 +OD/mn+VL/dSO+g3SKyCOk5/L/LEOPDu9fUD0tKdkV3Ja7exfj/g3M9ZnCDX10NAL ++ZMlVf3y6vIxB/brz9KnSERrlunoyRPDjjM6VqqeSZNixsOqKnD3KqJCNb05IzPG +nMyFGp7AUJ8JAWWDqXWsf49Fm009OGx3XD4T4PDkyigCeZZ7iXqGm/Hz/GBu4gZl +T8AsVjxEE1p9LqdyzIrNfGSmcbzotA+aBQKCAQEAk+rzu50WLFV2HXTRzLU7bnZ4 +jrQ43mOw0sJBN1k32TjCSR9sXkaa9FOZ6xor8j1nstotg8fChnju7vkVXPQzJfAR +M3YpNVpY7bb2Expvf6gbT74JFYVaHDUzz1vL87aKB9DnGz03jf38z30kQ+F9bI8f +vgHUo5FJSN1XTlbjFBC2iIb/HxRf3G4dGMdlp08Ts2TdBgsm2bVhYpl9s8Q08uo1 +2SU6IxCJAaTiMrg8hNKjE0NFQvGEgY0Ml3Mip+SYCYi7ZJ9VSEzUbEYihqb2As05 +2VdZ1Ei1iG+s+4Sw+0z7FpHCxmzW5XOeC6LZlhg3OUVa9rkazZSD3L39RVwxPw== +-----END RSA PRIVATE KEY----- diff --git a/wavefront/server/modules/auth_module/tests/data/test_public_key.pem b/wavefront/server/modules/auth_module/tests/data/test_public_key.pem new file mode 100644 index 00000000..f49995bb --- /dev/null +++ b/wavefront/server/modules/auth_module/tests/data/test_public_key.pem @@ -0,0 +1,13 @@ +-----BEGIN RSA PUBLIC KEY----- +MIICCgKCAgEAyvC7Yadh+hgMM7luP6k1cp0K78FR3MzRfNwURbKLsTaavmMKiyQ8 +b9n0taY9JVarEbOBEG5XJfNiQ6c8SJkzBwDT+HOkUwaDh33aKqWOUZELVdwb5GXR +qcD21V2Mq7tGT8EjWNlWOL/nSnu1WrSIvPk+3iEtrWy2opNn9E9mXShX1VDyg+iw +cO82l6ts5pN8jy6A7IABJlmfRZOUi55bsknpTwu1gNksTa82Gc3n2M0BIhjrCYvC +HCJyxXf0ZNVlm8UdGh2Vp3+xCx2Omdh1u4qfUA4eaqcYGTYL1inh7a73OIxtCtaT +ca8iuScnMbfndUr0fw3XuCp5i+H3qUajQnEhg4payvVqLxopSINNJEbcMSIJY1IT +j8gEWZevhGMZJXD0T1rqu2nixiSsfJfx1AXQl7Nq7eTD4x5Q246Rj67KzmXs6nu9 +lgAH8EShL6DKpuCkqgapHiGnaOMXhZjXcxBek3FyTXFuWrwaWlt1OyIN7ckmAFAv +NxfQdhpjvdHhTBxmoDIE4PN5e1hbce9Bpi1/q8MQtW3X24FRIZ9IS8muBt7R7lm5 +9pe3Qhz0irHrlKir2BMFzf8FNAImJA6QPs7663Oj7+bCcvdwiFdD+5TfUaqh0/JK +Klo7r1Xhf3zJucLVmBmw+w4TWEN+Rx9r3UE0u6FqzOpFbNkYxoajKqECAwEAAQ== +-----END RSA PUBLIC KEY----- diff --git a/wavefront/server/modules/auth_module/tests/fixtures/__init__.py b/wavefront/server/modules/auth_module/tests/fixtures/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/modules/auth_module/tests/fixtures/keys/private_key.pem b/wavefront/server/modules/auth_module/tests/fixtures/keys/private_key.pem new file mode 100644 index 00000000..fba64627 --- /dev/null +++ b/wavefront/server/modules/auth_module/tests/fixtures/keys/private_key.pem @@ -0,0 +1,51 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIJKgIBAAKCAgEAyvC7Yadh+hgMM7luP6k1cp0K78FR3MzRfNwURbKLsTaavmMK +iyQ8b9n0taY9JVarEbOBEG5XJfNiQ6c8SJkzBwDT+HOkUwaDh33aKqWOUZELVdwb +5GXRqcD21V2Mq7tGT8EjWNlWOL/nSnu1WrSIvPk+3iEtrWy2opNn9E9mXShX1VDy +g+iwcO82l6ts5pN8jy6A7IABJlmfRZOUi55bsknpTwu1gNksTa82Gc3n2M0BIhjr +CYvCHCJyxXf0ZNVlm8UdGh2Vp3+xCx2Omdh1u4qfUA4eaqcYGTYL1inh7a73OIxt +CtaTca8iuScnMbfndUr0fw3XuCp5i+H3qUajQnEhg4payvVqLxopSINNJEbcMSIJ +Y1ITj8gEWZevhGMZJXD0T1rqu2nixiSsfJfx1AXQl7Nq7eTD4x5Q246Rj67KzmXs 
+6nu9lgAH8EShL6DKpuCkqgapHiGnaOMXhZjXcxBek3FyTXFuWrwaWlt1OyIN7ckm +AFAvNxfQdhpjvdHhTBxmoDIE4PN5e1hbce9Bpi1/q8MQtW3X24FRIZ9IS8muBt7R +7lm59pe3Qhz0irHrlKir2BMFzf8FNAImJA6QPs7663Oj7+bCcvdwiFdD+5TfUaqh +0/JKKlo7r1Xhf3zJucLVmBmw+w4TWEN+Rx9r3UE0u6FqzOpFbNkYxoajKqECAwEA +AQKCAgEAh5opqwvcNO+Cn8FEwLcA5q+20hNb7uzRAdxQk23FV8mDxrmJHwY8eASs +NgB5lQx/TFmkYOVJyCsDuu7xIhVz+jb8bGRhJbURiCR49cwf97KyX1GvNYMvgH1W +saVLzKhbSAlm5AR9T3gzRRq6SHyfbZ4h+u0yh1JACwvyB1ZSsOa3Ietv0OJXm27M +DLZYEw3zyfyp2t2BZy21dhkxxcUvwkcO5WmOgyUaLGJqBP4H8F6zZiQ+UOeY8ZG3 +HOmVXRcWSOqCEqQ4Dm8NTr8WPQhdSs5Ku4RjoKI8QQRTDmx38Yja2mdCqV5NsUX0 +LsHRt7AJsd8+1fLa9M004FpCUVLI3Pix7UcA4c+Mkp/XeEYjwS08grTqbjqcigna +Q4Mk2TdmJkHVeuO49m2rud+MGW35X7AfZfg5EgXeiwEnJrO5LITHwdNohxOdZLt6 +YBVUh28QgINRb5Eumwykh5FKqnpthc4QcCbgpLMJ7AbRJFUyKjm00UjMCXtpobxT +0sRYm+YoD4vcQZ7q81VZaCgj4NprjbUsj8q5LfbBaSVA4O2M4YkQOfvwLvnflYIC +rvDNjFwcSaWt/hMk+ase6Ej/+idyU75u+mHnfVw/LjhMYmyi4+7f1m2pa/cWWjeu +ReBn7d+vF1xtW/PJpPF8yGLgTLIjGDmHsyHfMny7mGpTLCjylK0CggEBAO41pkr0 +pQBglpt5rnz80BmxvxRUe+S6zv+nJIBxZEz4SXyUuK98+7rDHyEq66db6jNkMyp1 +MvaUbsSwKWtKqekz/MmorF4dXwH8MYN0QJe/t5PalRyDToVfpiJgBg81dt81KHUp +RQIyOCvkWateKJioEtHdpn+WesKj0l5H7hAqfMTppUqoB7tsyiEeo6j2Pd9DxLr2 +tV6eS5aIY+BZfIGA++krrPDI+Pl+RXJJxV3Dim7Haza5B+ot3FDIThFm3HnTx7Sr +DZ40mJaW1KXcXZxsdy4CK/ERM10Zne/bIvly5TErNBzDcDlZcBZtLKdP98NwdG/U +U4D5kdEDExA/DrMCggEBANoYxGQd0sFN7w8shGoansbSlMsk1Gj2rXciAlh2vcMP +pRcnCVS3cqF48jPkqTez5jNOzkpa+Yp3uozQXtPxOUaRFJMmklTnA1mZQreJrl0s +EnPkq3S4FkTeR/KuRk4jdbnVNKs+e8VoCybdrmL3ql5UnB5O7QfgESzbgMrljclZ +YbaOYvLvME9mWr7AldAKUWEe5SLJ2qVRQyHtyTIO6kdjXPQy22FWnsyS81nqAuf3 +rAZSSKTixq1Y0nbUWxTIRqLLaXKr7qAxhPXZM2dLNtByNuiKcAncaPGXHj/ziSkr +5D6sGenBayyjonWQdD4tRLxWG3fvn3aH83rU3Fqry1sCggEACsX6Y2F1JPM5XIbA +UX7NmpvjXHQFtm6MdhuFFxoJw0FncW74e1dXuzaf7cwXJFaP3QsnFUB/sY89A0BE +X0ndLWLgh+5M/WSuq1RQmPVlBnfxQYqdO9DkeDu/M8bL/1wXqDirP8FCEbtbtwB4 +3btAcscHMt6nbRXNL4hhN0phhfGl+vHmO94lm2BsM9tC451fZv2VpC8MPN9D4+b/ +PWJOszy2J/GFvM4aFI6VbXnQp1D0rdXvD2aG6Rxx3mVwwnC4f/oICi/CSelvDUB9 +FZNF2JE/QqFug1SX0hoafoKWrll37CVcEgD0zmMxIpKFdnuzZO9tFLGxpR+kbKOK +9S0k6QKCAQEAk6E0PzjY0rS4X6MYtuHyHE+rHJ9/kNV5siy5PbyS0xkh/TwVWt0a +oSbHfE0KJoSGDLEJftd6FzrYcj0SE5Ez3gqSzNDNON9vKxVNTgPKnZMZoo1lagq6 +OD/mn+VL/dSO+g3SKyCOk5/L/LEOPDu9fUD0tKdkV3Ja7exfj/g3M9ZnCDX10NAL ++ZMlVf3y6vIxB/brz9KnSERrlunoyRPDjjM6VqqeSZNixsOqKnD3KqJCNb05IzPG +nMyFGp7AUJ8JAWWDqXWsf49Fm009OGx3XD4T4PDkyigCeZZ7iXqGm/Hz/GBu4gZl +T8AsVjxEE1p9LqdyzIrNfGSmcbzotA+aBQKCAQEAk+rzu50WLFV2HXTRzLU7bnZ4 +jrQ43mOw0sJBN1k32TjCSR9sXkaa9FOZ6xor8j1nstotg8fChnju7vkVXPQzJfAR +M3YpNVpY7bb2Expvf6gbT74JFYVaHDUzz1vL87aKB9DnGz03jf38z30kQ+F9bI8f +vgHUo5FJSN1XTlbjFBC2iIb/HxRf3G4dGMdlp08Ts2TdBgsm2bVhYpl9s8Q08uo1 +2SU6IxCJAaTiMrg8hNKjE0NFQvGEgY0Ml3Mip+SYCYi7ZJ9VSEzUbEYihqb2As05 +2VdZ1Ei1iG+s+4Sw+0z7FpHCxmzW5XOeC6LZlhg3OUVa9rkazZSD3L39RVwxPw== +-----END RSA PRIVATE KEY----- diff --git a/wavefront/server/modules/auth_module/tests/fixtures/keys/public_key.pem b/wavefront/server/modules/auth_module/tests/fixtures/keys/public_key.pem new file mode 100644 index 00000000..f49995bb --- /dev/null +++ b/wavefront/server/modules/auth_module/tests/fixtures/keys/public_key.pem @@ -0,0 +1,13 @@ +-----BEGIN RSA PUBLIC KEY----- +MIICCgKCAgEAyvC7Yadh+hgMM7luP6k1cp0K78FR3MzRfNwURbKLsTaavmMKiyQ8 +b9n0taY9JVarEbOBEG5XJfNiQ6c8SJkzBwDT+HOkUwaDh33aKqWOUZELVdwb5GXR +qcD21V2Mq7tGT8EjWNlWOL/nSnu1WrSIvPk+3iEtrWy2opNn9E9mXShX1VDyg+iw +cO82l6ts5pN8jy6A7IABJlmfRZOUi55bsknpTwu1gNksTa82Gc3n2M0BIhjrCYvC +HCJyxXf0ZNVlm8UdGh2Vp3+xCx2Omdh1u4qfUA4eaqcYGTYL1inh7a73OIxtCtaT +ca8iuScnMbfndUr0fw3XuCp5i+H3qUajQnEhg4payvVqLxopSINNJEbcMSIJY1IT 
+j8gEWZevhGMZJXD0T1rqu2nixiSsfJfx1AXQl7Nq7eTD4x5Q246Rj67KzmXs6nu9 +lgAH8EShL6DKpuCkqgapHiGnaOMXhZjXcxBek3FyTXFuWrwaWlt1OyIN7ckmAFAv +NxfQdhpjvdHhTBxmoDIE4PN5e1hbce9Bpi1/q8MQtW3X24FRIZ9IS8muBt7R7lm5 +9pe3Qhz0irHrlKir2BMFzf8FNAImJA6QPs7663Oj7+bCcvdwiFdD+5TfUaqh0/JK +Klo7r1Xhf3zJucLVmBmw+w4TWEN+Rx9r3UE0u6FqzOpFbNkYxoajKqECAwEAAQ== +-----END RSA PUBLIC KEY----- diff --git a/wavefront/server/modules/auth_module/tests/fixtures/test_keys.py b/wavefront/server/modules/auth_module/tests/fixtures/test_keys.py new file mode 100644 index 00000000..e1aa1651 --- /dev/null +++ b/wavefront/server/modules/auth_module/tests/fixtures/test_keys.py @@ -0,0 +1,20 @@ +import base64 +import os + + +def load_test_key(filename: str) -> str: + """Load and base64 encode a test key file.""" + current_dir = os.path.dirname(os.path.abspath(__file__)) + key_path = os.path.join(current_dir, 'keys', filename) + + with open(key_path, 'rb') as key_file: + key_data = key_file.read() + return base64.b64encode(key_data).decode() + + +def get_test_keys(): + """Get both test keys encoded in base64.""" + return { + 'private_key': load_test_key('private_key.pem'), + 'public_key': load_test_key('public_key.pem'), + } diff --git a/wavefront/server/modules/auth_module/tests/test_superset_controller.py b/wavefront/server/modules/auth_module/tests/test_superset_controller.py new file mode 100644 index 00000000..1af3f259 --- /dev/null +++ b/wavefront/server/modules/auth_module/tests/test_superset_controller.py @@ -0,0 +1,305 @@ +import json +from uuid import uuid4 + +from db_repo_module.models.resource import Resource +from db_repo_module.models.resource import ResourceScope +from db_repo_module.models.role import Role +from db_repo_module.models.role_resource import RoleResource +from db_repo_module.models.session import Session +from db_repo_module.models.user import User +from db_repo_module.models.user_role import UserRole +import pytest +from sqlalchemy.ext.asyncio import AsyncSession + + +@pytest.mark.asyncio +async def test_superset_authenticator_with_admin( + test_client, + auth_token, + test_session: AsyncSession, + test_user_id, + test_session_id, + mock_auth_functions, +): + role_id = str(uuid4()) + dashboard_resource_id = str(uuid4()) + data_filter_resource_id = str(uuid4()) + + # Create a user in the database + user = User( + id=test_user_id, + email='test@example.com', + password='hashed_password', + first_name='Test', + last_name='User', + ) + + # Create a session in the database + db_session = Session( + id=test_session_id, user_id=test_user_id, device_info='test_device' + ) + + role = Role(id=role_id, name='test_role', description='Test Role Description') + dashboard_resource = Resource( + id=dashboard_resource_id, + key='dashboard_resource', + value='test_dashboard', + description='Test Dashboard Resource', + scope=ResourceScope.DASHBOARD, + ) + data_filter_resource = Resource( + id=data_filter_resource_id, + key='region', + value='North', + description='Region filter for North region', + scope=ResourceScope.DATA, + meta=json.dumps( + {'type': 'string', 'allowed_values': ['North', 'South', 'East', 'West']} + ), + ) + dashboard_role_resource = RoleResource( + role_id=role_id, resource_id=dashboard_resource_id + ) + data_filter_role_resource = RoleResource( + role_id=role_id, resource_id=data_filter_resource_id + ) + user_role = UserRole(user_id=test_user_id, role_id=role_id) + + async with test_session() as session: + # First add and commit the user, session, and role + session.add(user) + session.add(db_session) + 
session.add(role) + await session.commit() + + # Then add and commit the resources + session.add(dashboard_resource) + session.add(data_filter_resource) + await session.commit() + + # Finally add and commit the mappings + session.add(dashboard_role_resource) + session.add(data_filter_role_resource) + session.add(user_role) + await session.commit() + + response = test_client.get( + '/v1/superset/authenticate', headers={'Authorization': f'Bearer {auth_token}'} + ) + assert response.json()['data']['token'] == 'mock_guest_token' + assert response.status_code == 200 + + +@pytest.mark.asyncio +async def test_superset_authenticator_without_admin( + test_client, + auth_token, + test_session: AsyncSession, + test_user_id, + test_session_id, + mock_admin_false_functions, +): + role_id = str(uuid4()) + dashboard_resource_id = str(uuid4()) + data_filter_resource_id = str(uuid4()) + + # Create a user in the database + user = User( + id=test_user_id, + email='test@example.com', + password='hashed_password', + first_name='Test', + last_name='User', + ) + + # Create a session in the database + db_session = Session( + id=test_session_id, user_id=test_user_id, device_info='test_device' + ) + + role = Role(id=role_id, name='test_role', description='Test Role Description') + dashboard_resource = Resource( + id=dashboard_resource_id, + key='dashboard_resource', + value='test_dashboard', + description='Test Dashboard Resource', + scope=ResourceScope.DASHBOARD, + ) + data_filter_resource = Resource( + id=data_filter_resource_id, + key='region', + value='North', + description='Region filter for North region', + scope=ResourceScope.DATA, + meta=json.dumps( + {'type': 'string', 'allowed_values': ['North', 'South', 'East', 'West']} + ), + ) + dashboard_role_resource = RoleResource( + role_id=role_id, resource_id=dashboard_resource_id + ) + data_filter_role_resource = RoleResource( + role_id=role_id, resource_id=data_filter_resource_id + ) + user_role = UserRole(user_id=test_user_id, role_id=role_id) + + async with test_session() as session: + # First add and commit the user, session, and role + session.add(user) + session.add(db_session) + session.add(role) + await session.commit() + + # Then add and commit the resources + session.add(dashboard_resource) + session.add(data_filter_resource) + await session.commit() + + # Finally add and commit the mappings + session.add(dashboard_role_resource) + session.add(data_filter_role_resource) + session.add(user_role) + await session.commit() + + response = test_client.get( + '/v1/superset/authenticate', headers={'Authorization': f'Bearer {auth_token}'} + ) + assert response.status_code == 200 + + +@pytest.mark.asyncio +async def test_superset_authenticator_with_admin_and_dashboard_empty( + test_client, + auth_token, + test_session: AsyncSession, + test_user_id, + test_session_id, + mock_auth_functions, +): + role_id = str(uuid4()) + dashboard_resource_id = str(uuid4()) + data_filter_resource_id = str(uuid4()) + + # Create a user in the database + user = User( + id=test_user_id, + email='test@example.com', + password='hashed_password', + first_name='Test', + last_name='User', + ) + + # Create a session in the database + db_session = Session( + id=test_session_id, user_id=test_user_id, device_info='test_device' + ) + + role = Role(id=role_id, name='test_role', description='Test Role Description') + dashboard_resource = Resource( + id=dashboard_resource_id, + key='dashboard_resource', + value='test_dashboard', + description='Test Dashboard Resource', + 
scope=ResourceScope.CONSOLE, + ) + data_filter_resource = Resource( + id=data_filter_resource_id, + key='region', + value='North', + description='Region filter for North region', + scope=ResourceScope.DATA, + meta=json.dumps( + {'type': 'string', 'allowed_values': ['North', 'South', 'East', 'West']} + ), + ) + dashboard_role_resource = RoleResource( + role_id=role_id, resource_id=dashboard_resource_id + ) + data_filter_role_resource = RoleResource( + role_id=role_id, resource_id=data_filter_resource_id + ) + user_role = UserRole(user_id=test_user_id, role_id=role_id) + + async with test_session() as session: + # First add and commit the user, session, and role + session.add(user) + session.add(db_session) + session.add(role) + await session.commit() + + # Then add and commit the resources + session.add(dashboard_resource) + session.add(data_filter_resource) + await session.commit() + + # Finally add and commit the mappings + session.add(dashboard_role_resource) + session.add(data_filter_role_resource) + session.add(user_role) + await session.commit() + + response = test_client.get( + '/v1/superset/authenticate', headers={'Authorization': f'Bearer {auth_token}'} + ) + assert response.status_code == 400 + + +@pytest.mark.asyncio +async def test_superset_authenticator_not_admin_and_data_filter_empty( + test_client, + auth_token, + test_session: AsyncSession, + test_user_id, + test_session_id, + mock_admin_false_functions, +): + role_id = str(uuid4()) + dashboard_resource_id = str(uuid4()) + + # Create a user in the database + user = User( + id=test_user_id, + email='test@example.com', + password='hashed_password', + first_name='Test', + last_name='User', + ) + + # Create a session in the database + db_session = Session( + id=test_session_id, user_id=test_user_id, device_info='test_device' + ) + + role = Role(id=role_id, name='test_role', description='Test Role Description') + dashboard_resource = Resource( + id=dashboard_resource_id, + key='dashboard_resource', + value='test_dashboard', + description='Test Dashboard Resource', + scope=ResourceScope.DASHBOARD, + ) + dashboard_role_resource = RoleResource( + role_id=role_id, resource_id=dashboard_resource_id + ) + user_role = UserRole(user_id=test_user_id, role_id=role_id) + + async with test_session() as session: + # First add and commit the user, session, and role + session.add(user) + session.add(db_session) + session.add(role) + await session.commit() + + # Then add and commit the resources + session.add(dashboard_resource) + await session.commit() + + # Finally add and commit the mappings + session.add(dashboard_role_resource) + session.add(user_role) + await session.commit() + + response = test_client.get( + '/v1/superset/authenticate', headers={'Authorization': f'Bearer {auth_token}'} + ) + assert response.status_code == 400 diff --git a/wavefront/server/modules/common_module/common_module/common_cache.py b/wavefront/server/modules/common_module/common_module/common_cache.py new file mode 100644 index 00000000..f540606a --- /dev/null +++ b/wavefront/server/modules/common_module/common_module/common_cache.py @@ -0,0 +1,23 @@ +from abc import ABC +from abc import abstractmethod +from typing import Any, Optional, Union + + +class CommonCache(ABC): + @abstractmethod + def add( + self, + key: str, + value: Union[str, int, float, bytes], + expiry: int = 3600, + nx: bool = False, + ) -> bool: + pass + + @abstractmethod + def get_str(self, key: str, default: Any = None) -> Optional[str]: + pass + + @abstractmethod + def remove(self, key: str) -> 
bool: + pass diff --git a/wavefront/server/modules/common_module/common_module/common_container.py b/wavefront/server/modules/common_module/common_module/common_container.py new file mode 100644 index 00000000..97946950 --- /dev/null +++ b/wavefront/server/modules/common_module/common_module/common_container.py @@ -0,0 +1,26 @@ +from common_module.response_formatter import ResponseFormatter +from common_module.scheduler import Scheduler +from dependency_injector import containers +from dependency_injector import providers + +from flo_cloud.cloud_storage import CloudStorageManager + + +class CommonContainer(containers.DeclarativeContainer): + config = providers.Configuration(ini_files=['./config.ini']) + + response_formatter = providers.Singleton(ResponseFormatter) + + cache_manager = providers.Dependency() + + # periodic scheduler + scheduler = providers.Singleton( + Scheduler, + redis_host=config.redis.host, + redis_port=config.redis.port, + cache_manager=cache_manager, + ) + + cloud_storage_manager = providers.Singleton( + CloudStorageManager, provider=config.cloud_config.cloud_provider + ) diff --git a/wavefront/server/modules/common_module/common_module/feature/feature_flag.py b/wavefront/server/modules/common_module/common_module/feature/feature_flag.py new file mode 100644 index 00000000..094bb957 --- /dev/null +++ b/wavefront/server/modules/common_module/common_module/feature/feature_flag.py @@ -0,0 +1,31 @@ +import os + +AZURE_FLAG = 'AZURE_FLAG' +AZURE_OPENAI_FLAG = 'AZURE_OPENAI_FLAG' +CELERY_FLAG = 'CELERY_FLAG' +EMAIL_SYNC_FLAG = 'EMAIL_SYNC_FLAG' +GOOGLE_FLAG = 'GOOGLE_FLAG' +INACTIVE_ACCOUNT_DISABLE_FLAG = 'INACTIVE_ACCOUNT_DISABLE_FLAG' +SAML_FLAG = 'SAML_FLAG' +SLACK_FLAG = 'SLACK_FLAG' +SUPERSET_FLAG = 'SUPERSET_FLAG' +VECTOR_DB_FLAG = 'VECTOR_DB_FLAG' + +feature_flag_config = { + AZURE_FLAG: os.environ.get(AZURE_FLAG, 'false'), + AZURE_OPENAI_FLAG: os.environ.get(AZURE_OPENAI_FLAG, 'false'), + CELERY_FLAG: os.environ.get(CELERY_FLAG, 'false'), + EMAIL_SYNC_FLAG: os.environ.get(EMAIL_SYNC_FLAG, 'false'), + GOOGLE_FLAG: os.environ.get(GOOGLE_FLAG, 'false'), + INACTIVE_ACCOUNT_DISABLE_FLAG: os.environ.get( + INACTIVE_ACCOUNT_DISABLE_FLAG, 'false' + ), + SAML_FLAG: os.environ.get(SAML_FLAG, 'false'), + SLACK_FLAG: os.environ.get(SLACK_FLAG, 'false'), + SUPERSET_FLAG: os.environ.get(SUPERSET_FLAG, 'false'), + VECTOR_DB_FLAG: os.environ.get(VECTOR_DB_FLAG, 'false'), +} + + +def is_feature_enabled(feature: str): + return feature_flag_config[feature] == 'true' diff --git a/wavefront/server/modules/common_module/common_module/log/logger.py b/wavefront/server/modules/common_module/common_module/log/logger.py new file mode 100644 index 00000000..8431cda8 --- /dev/null +++ b/wavefront/server/modules/common_module/common_module/log/logger.py @@ -0,0 +1,48 @@ +import logging +import os + +from ..middleware.request_id_middleware import get_current_request_id + + +class RequestAwareFormatter(logging.Formatter): + def format(self, record: logging.LogRecord) -> str: + request_id = get_current_request_id() + record.request_id = request_id + return super().format(record) + + +class RequestAwareLogger(logging.Logger): + def error(self, msg, *args, **kwargs): + """Override error method to always include exc_info=True.""" + if 'exc_info' not in kwargs: + kwargs['exc_info'] = True + super().error(msg, *args, **kwargs) + + +log_level = os.environ.get('LOG_LEVEL', 'INFO') +logging.getLogger('uvicorn').setLevel(log_level) + +log_format = ( + '%(asctime)s | %(levelname)-8s | %(name)s | 
%(request_id)s | ' + '%(filename)s:%(lineno)d | %(message)s' +) + +formatter = RequestAwareFormatter(fmt=log_format, datefmt='%Y-%m-%d %H:%M:%S') + +logging.setLoggerClass(RequestAwareLogger) + +logging.basicConfig( + level=log_level, + format=log_format, + datefmt='%Y-%m-%d %H:%M:%S', + force=True, # Override any existing configuration +) + +# Get root logger and apply custom formatter +root_logger = logging.getLogger() +for handler in root_logger.handlers: + handler.setFormatter(formatter) + +app_name = os.environ.get('APP_NAME', 'floware') + +logger = logging.getLogger(app_name) diff --git a/wavefront/server/modules/common_module/common_module/middleware/__init__.py b/wavefront/server/modules/common_module/common_module/middleware/__init__.py new file mode 100644 index 00000000..d5368497 --- /dev/null +++ b/wavefront/server/modules/common_module/common_module/middleware/__init__.py @@ -0,0 +1,3 @@ +from .request_id_middleware import RequestIdMiddleware, get_current_request_id + +__all__ = ['RequestIdMiddleware', 'get_current_request_id'] diff --git a/wavefront/server/modules/common_module/common_module/middleware/request_id_middleware.py b/wavefront/server/modules/common_module/common_module/middleware/request_id_middleware.py new file mode 100644 index 00000000..eac08cc6 --- /dev/null +++ b/wavefront/server/modules/common_module/common_module/middleware/request_id_middleware.py @@ -0,0 +1,73 @@ +import re +import secrets +import string +from typing import Callable +from fastapi import Request, Response +from starlette.middleware.base import BaseHTTPMiddleware +from contextvars import ContextVar + +# Context variable to store current request ID +request_id_context: ContextVar[str] = ContextVar('request_id', default='NO-REQUEST-ID') + + +class RequestIdMiddleware(BaseHTTPMiddleware): + """ + Middleware to handle X-Flo-Request-ID header for request tracing with prefix support. 
+ + Features: + - Checks for existing X-Flo-Request-ID header (case insensitive) + - Validates that the ID format is: prefix-[8-12 alphanumeric characters] + - Accepts prefixes: 'fe' (frontend) or 'be' (backend) + - Generates new ID with 'be' prefix if missing or invalid + - Stores ID in request.state and logging context + - Adds X-Flo-Request-ID to response headers + """ + + # Regex pattern for prefix-[8-12 alphanumeric characters] + REQUEST_ID_PATTERN = re.compile(r'^(fe|be)-[a-zA-Z0-9]{8,12}$') + VALID_PREFIXES = {'fe', 'be'} + + @staticmethod + def generate_request_id(prefix: str = 'be') -> str: + if prefix not in RequestIdMiddleware.VALID_PREFIXES: + prefix = 'be' # Default to backend + + length = secrets.randbelow(5) + 8 + alphabet = string.ascii_letters + string.digits + random_part = ''.join(secrets.choice(alphabet) for _ in range(length)) + return f'{prefix}-{random_part}' + + @staticmethod + def validate_request_id(request_id: str) -> bool: + return bool(RequestIdMiddleware.REQUEST_ID_PATTERN.match(request_id)) + + @staticmethod + def get_request_id_from_headers(request: Request) -> str | None: + """Extract X-Flo-Request-ID from headers (case insensitive).""" + for header_name, header_value in request.headers.items(): + if header_name.lower() == 'x-flo-request-id': + return header_value + return None + + async def dispatch(self, request: Request, call_next: Callable) -> Response: + existing_request_id = self.get_request_id_from_headers(request) + + if existing_request_id and self.validate_request_id(existing_request_id): + request_id = existing_request_id + else: + request_id = self.generate_request_id('be') + + request.state.request_id = request_id + token = request_id_context.set(request_id) + + try: + response = await call_next(request) + response.headers['X-Flo-Request-ID'] = request_id + return response + finally: + request_id_context.reset(token) + + +def get_current_request_id() -> str: + """Get the current request ID from context.""" + return request_id_context.get() diff --git a/wavefront/server/modules/common_module/common_module/models/response.py b/wavefront/server/modules/common_module/common_module/models/response.py new file mode 100644 index 00000000..1a6c3754 --- /dev/null +++ b/wavefront/server/modules/common_module/common_module/models/response.py @@ -0,0 +1,34 @@ +from typing import Any, Dict, Generic, Optional, TypeVar + +from pydantic import BaseModel + + +class Meta(BaseModel): + status: str + code: int + error: Optional[str] = None + + +class ResponseModel(BaseModel): + meta: Meta + data: Optional[Dict[str, Any]] = None + + +# Generic type variable +T = TypeVar('T') + + +class GenericResponseModel(BaseModel, Generic[T]): + """Generic response model that can accept any type for data field""" + + meta: Meta + data: Optional[T] = None + + +class DataWrapper(BaseModel, Generic[T]): + """ + Generic wrapper for response data with message. 
+    """
+
+    message: str
+    data: T
diff --git a/wavefront/server/modules/common_module/common_module/prometheus/prometheus_middleware.py b/wavefront/server/modules/common_module/common_module/prometheus/prometheus_middleware.py
new file mode 100644
index 00000000..c01ad77b
--- /dev/null
+++ b/wavefront/server/modules/common_module/common_module/prometheus/prometheus_middleware.py
@@ -0,0 +1,120 @@
+import time
+from typing import Callable, Optional
+
+from fastapi import Request
+from fastapi import Response
+from prometheus_client import Counter
+from prometheus_client import Gauge
+from prometheus_client import Histogram
+from prometheus_client import REGISTRY
+from prometheus_client.openmetrics.exposition import CONTENT_TYPE_LATEST
+from prometheus_client.openmetrics.exposition import generate_latest
+from starlette.middleware.base import BaseHTTPMiddleware
+
+
+class PrometheusMiddleware(BaseHTTPMiddleware):
+    _instance: Optional['PrometheusMiddleware'] = None
+
+    def __init__(self, app):
+        super().__init__(app)
+        PrometheusMiddleware._instance = self
+
+        # Common labels that will be used across all metrics
+        self.common_labels = ['module', 'instance']
+
+        # HTTP metrics
+        self.http_requests_total = Counter(
+            'http_requests_total',
+            'Total number of HTTP requests',
+            self.common_labels + ['method', 'endpoint', 'status_code'],
+        )
+
+        self.http_request_duration = Histogram(
+            'http_request_duration_seconds',
+            'HTTP request duration in seconds',
+            self.common_labels + ['method', 'endpoint'],
+        )
+
+        self.http_requests_in_progress = Gauge(
+            'http_requests_in_progress',
+            'Number of HTTP requests in progress',
+            self.common_labels + ['method', 'endpoint'],
+        )
+
+        self.http_errors_total = Counter(
+            'http_errors_total',
+            'Total number of HTTP errors',
+            self.common_labels + ['method', 'endpoint', 'status_code'],
+        )
+
+    @classmethod
+    def get_instance(cls) -> Optional['PrometheusMiddleware']:
+        """Get the singleton instance of PrometheusMiddleware"""
+        return cls._instance
+
+    def get_labels(self, request: Request) -> dict:
+        """Extract common labels from request"""
+        return {
+            'module': request.url.path.split('/')[3]
+            if len(request.url.path.split('/')) > 3
+            else 'root',
+            'instance': f'{request.client.host}:{request.url.port}'
+            if request.client
+            else 'unknown:unknown',
+            'method': request.method,
+            'endpoint': request.url.path,
+        }
+
+    async def dispatch(self, request: Request, call_next: Callable) -> Response:
+        # Skip metrics endpoint to avoid infinite recursion
+        if request.url.path == '/v1/_metrics':
+            return await call_next(request)
+
+        # Get common labels
+        labels = self.get_labels(request)
+
+        # Record request start
+        self.http_requests_in_progress.labels(**labels).inc()
+
+        # Start timing
+        start_time = time.time()
+
+        try:
+            # Process the request
+            response = await call_next(request)
+
+            # Track errors for 4xx and 5xx status codes
+            if response and response.status_code >= 400:
+                self.http_errors_total.labels(
+                    **labels, status_code=response.status_code
+                ).inc()
+
+            # Record request duration
+            duration = time.time() - start_time
+            self.http_request_duration.labels(**labels).observe(duration)
+
+            # Record request completion
+            self.http_requests_total.labels(
+                **labels, status_code=response.status_code
+            ).inc()
+
+            return response
+
+        except Exception as e:
+            # Record error
+            self.http_requests_total.labels(
+                **labels, status_code=getattr(e, 'status_code', 500)
+            ).inc()
+
+            self.http_errors_total.labels(
+                **labels, status_code=getattr(e, 'status_code', 500)
+            ).inc()
+
+            raise
+        finally:
+            # Decrement in-progress counter
+            self.http_requests_in_progress.labels(**labels).dec()
+
+    @staticmethod
+    async def metrics_endpoint(request: Request) -> Response:
+        """Endpoint to expose Prometheus metrics"""
+        # generate_latest here is the OpenMetrics exposition, so advertise the
+        # matching content type rather than plain text
+        return Response(content=generate_latest(REGISTRY), media_type=CONTENT_TYPE_LATEST)
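+
+# Typical wiring (a sketch; '/v1/_metrics' matches the skip-path in dispatch):
+#
+#     from fastapi import FastAPI
+#     app = FastAPI()
+#     app.add_middleware(PrometheusMiddleware)
+#     app.add_api_route('/v1/_metrics', PrometheusMiddleware.metrics_endpoint)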
diff --git a/wavefront/server/modules/common_module/common_module/response_formatter.py b/wavefront/server/modules/common_module/common_module/response_formatter.py
new file mode 100644
index 00000000..19eb1a80
--- /dev/null
+++ b/wavefront/server/modules/common_module/common_module/response_formatter.py
@@ -0,0 +1,16 @@
+from typing import Any
+
+from common_module.models.response import Meta
+from common_module.models.response import ResponseModel
+
+
+class ResponseFormatter:
+    def buildSuccessResponse(self, data: Any):
+        meta = Meta(status='success', code=1)
+        # Prefer pydantic v2's model_dump(); fall back to the v1-style dict()
+        if hasattr(data, 'model_dump'):
+            data = data.model_dump()
+        elif hasattr(data, 'dict'):
+            data = data.dict()
+        return ResponseModel(meta=meta, data=data).model_dump()
+
+    def buildErrorResponse(self, error: str):
+        meta = Meta(status='failure', code=-1, error=error)
+        return ResponseModel(meta=meta).model_dump()
diff --git a/wavefront/server/modules/common_module/common_module/scheduler.py b/wavefront/server/modules/common_module/common_module/scheduler.py
new file mode 100644
index 00000000..e9ed5bfc
--- /dev/null
+++ b/wavefront/server/modules/common_module/common_module/scheduler.py
@@ -0,0 +1,157 @@
+import os
+
+from apscheduler.events import EVENT_JOB_ERROR
+from apscheduler.executors.pool import ThreadPoolExecutor
+from apscheduler.jobstores.redis import RedisJobStore
+from apscheduler.schedulers.background import BackgroundScheduler
+from apscheduler.triggers.cron import CronTrigger
+from common_module.common_cache import CommonCache
+from common_module.log.logger import logger
+
+
+class Scheduler:
+    def __init__(self, redis_host: str, redis_port: int, cache_manager: CommonCache):
+        self.redis_store = RedisJobStore(
+            jobs_key='apscheduler.jobs',
+            run_times_key='apscheduler.run_times',
+            host=redis_host,
+            port=redis_port,
+        )
+
+        self.cache_manager: CommonCache = cache_manager
+        jobstores = {'default': self.redis_store}
+
+        # Leave one core free, but never drop below one worker
+        # (os.cpu_count() can return None, and 1 - 1 would give zero workers)
+        executors = {'default': ThreadPoolExecutor(max(1, (os.cpu_count() or 2) - 1))}
+        job_defaults = {
+            'coalesce': False,  # run each missed execution individually instead of coalescing them into one
+            'max_instances': 3,  # how many instances of a job are allowed to run concurrently
+        }
+        self.scheduler = BackgroundScheduler(
+            jobstores=jobstores,
+            executors=executors,
+            job_defaults=job_defaults,
+            timezone='Asia/Kolkata',
+        )
+        self.scheduler.add_listener(self.error_handler, EVENT_JOB_ERROR)
+
+    def start_scheduler(self):
+        self.scheduler.remove_all_jobs()
+        self.redis_store.remove_all_jobs()
+        logger.debug(f'After removing: {self.scheduler.get_jobs()}')
+        if not self.scheduler.running:
+            self.scheduler.start()
+
+    def register(self, task, cron_params, id):
+        try:
+            if not id:
+                raise ValueError('id must be provided for the task registration.')
+
+            existing_job = self.scheduler.get_job(id)
+            lock_key = f'scheduler_lock:{id}'
+            if self.cache_manager.add(lock_key, '1', expiry=10, nx=True):
+                try:
+                    if existing_job:
+                        logger.info(f'{id} already exists. Skipping registration')
+                    else:
+                        logger.info(f'Creating new job {id}')
+                        self.scheduler.add_job(
+                            task,
+                            trigger=CronTrigger(**cron_params),
+                            id=id,
+                            replace_existing=True,
+                        )
+                finally:
+                    self.cache_manager.remove(lock_key)
+            else:
+                logger.info('Job registration is already being handled by another instance')
+
+        except Exception as e:
+            logger.error(f'Error on task scheduling: {e}')
+
+    def is_cron_too_frequent(self, cron_params, min_interval_minutes=60):
+        """
+        Check if a cron configuration runs more frequently than the specified minimum interval.
+
+        Args:
+            cron_params (dict): Dictionary containing cron parameters
+            min_interval_minutes (int): Minimum allowed interval in minutes
+
+        Returns:
+            tuple: (bool, str) - (True if too frequent, explanation message)
+        """
+        minute = cron_params.get('minute', '0')
+        hour = cron_params.get('hour', '*')
+        day = cron_params.get('day', '*')
+        month = cron_params.get('month', '*')
+        day_of_week = cron_params.get('day_of_week', '*')
+
+        # Check for wildcards in fields that would allow execution more than once per hour
+        if any(
+            [
+                # If minute contains wildcard, comma, hyphen, or step
+                ('*' in minute or ',' in minute or '-' in minute or '/' in minute),
+                # If hour, day, month, and day_of_week are all wildcards
+                (hour == '*' and day == '*' and month == '*' and day_of_week == '*'),
+            ]
+        ):
+            # More complex patterns that could run more frequently than every hour
+            if '/' in minute:
+                # Check step values like */10 (every 10 minutes)
+                try:
+                    step = int(minute.split('/')[1])
+                    if step < min_interval_minutes:
+                        return (
+                            True,
+                            f'Cron configured to run every {step} minutes, which is more frequent than the minimum allowed interval of {min_interval_minutes} minutes.',
+                        )
+                except (IndexError, ValueError):
+                    pass
+
+            # If there's a wildcard or complex pattern in the minute field, it might run multiple times per hour
+            if '*' in minute or ',' in minute or '-' in minute:
+                return (
+                    True,
+                    f"Cron configuration '{minute} {hour} {day} {month} {day_of_week}' may run more frequently than every {min_interval_minutes} minutes.",
+                )
+
+        return False, 'Cron configuration meets the minimum interval requirements.'
+
+    def validate_cron_frequency(self, cron_config, min_interval_minutes=60):
+        """
+        Validate that the cron configuration doesn't run more frequently than specified.
+
+        Args:
+            cron_config (str): Five-field cron string, e.g. '0 2 * * *'
+            min_interval_minutes (int): Minimum allowed interval in minutes
+
+        Returns:
+            None
+
+        Raises:
+            ValueError: If cron configuration would run more frequently than allowed
+        """
+        cron_parts = cron_config.split()
+        if len(cron_parts) != 5:
+            raise ValueError(
+                f'Invalid cron configuration. Expected 5 parts but got {len(cron_parts)}'
+            )
+
+        cron_params = {
+            'minute': cron_parts[0],
+            'hour': cron_parts[1],
+            'day': cron_parts[2],
+            'month': cron_parts[3],
+            'day_of_week': cron_parts[4],
+        }
+
+        is_too_frequent, message = self.is_cron_too_frequent(
+            cron_params, min_interval_minutes
+        )
+        if is_too_frequent:
+            raise ValueError(message)
+
+    def error_handler(self, event):
+        logger.error(f'Scheduled job raised an error: {event}')
+
+    def log_all_jobs(self):
+        logger.info(self.scheduler.get_jobs())
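+
+# Typical usage (a sketch; the Redis location, the CommonCache instance and the
+# task callable are assumptions):
+#
+#     scheduler = Scheduler('localhost', 6379, cache_manager)
+#     scheduler.start_scheduler()
+#     scheduler.validate_cron_frequency('0 2 * * *')  # raises ValueError if too frequent
+#     scheduler.register(nightly_report, {'minute': '0', 'hour': '2'}, id='nightly-report')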
diff --git a/wavefront/server/modules/common_module/common_module/security/__init__.py b/wavefront/server/modules/common_module/common_module/security/__init__.py
new file mode 100644
index 00000000..0cc2fc03
--- /dev/null
+++ b/wavefront/server/modules/common_module/common_module/security/__init__.py
@@ -0,0 +1,5 @@
+"""Security utilities for authentication and authorization"""
+
+from common_module.security.bearer_auth import BearerAuth, bearer_auth
+
+__all__ = ['BearerAuth', 'bearer_auth']
diff --git a/wavefront/server/modules/common_module/common_module/security/bearer_auth.py b/wavefront/server/modules/common_module/common_module/security/bearer_auth.py
new file mode 100644
index 00000000..d90e6330
--- /dev/null
+++ b/wavefront/server/modules/common_module/common_module/security/bearer_auth.py
@@ -0,0 +1,32 @@
+"""Bearer token authentication scheme for FastAPI"""
+
+from fastapi.security import HTTPBearer
+
+
+class BearerAuth(HTTPBearer):
+    """
+    Custom HTTPBearer authentication scheme that maps to 'BearerAuth' in OpenAPI.
+
+    This provides a centralized Bearer token authentication scheme that can be
+    imported and used across all controllers to ensure consistent security
+    configuration.
+
+    Usage:
+        from common_module.security.bearer_auth import bearer_auth
+        from fastapi import Security
+
+        @router.get('/endpoint', dependencies=[Security(bearer_auth)])
+        async def my_endpoint():
+            ...
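+
+    A handler can also receive the parsed credentials directly (hypothetical
+    endpoint shown; with auto_error=False the value is None when no
+    Authorization header was sent):
+
+        from fastapi.security import HTTPAuthorizationCredentials
+
+        @router.get('/whoami')
+        async def whoami(
+            creds: HTTPAuthorizationCredentials = Security(bearer_auth),
+        ):
+            return {'scheme': creds.scheme if creds else None}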
+    """
+
+    def __init__(self):
+        super().__init__(
+            auto_error=False,  # Security validation handled by RequireAuthMiddleware
+            scheme_name='BearerAuth',  # Must match OpenAPI components.securitySchemes name
+            description='Enter your JWT Bearer token',
+        )
+
+
+# Singleton instance to be imported and used across the application
+bearer_auth = BearerAuth()
diff --git a/wavefront/server/modules/common_module/common_module/utils/odata_parser.py b/wavefront/server/modules/common_module/common_module/utils/odata_parser.py
new file mode 100644
index 00000000..f11ca447
--- /dev/null
+++ b/wavefront/server/modules/common_module/common_module/utils/odata_parser.py
@@ -0,0 +1,120 @@
+from datetime import datetime
+import os
+import re
+from typing import Any, Tuple
+
+from common_module.log.logger import logger
+
+cloud_provider = os.environ.get('CLOUD_PROVIDER')
+
+
+def parse_value(value: str) -> Any:
+    """Converts string values to appropriate Python types."""
+    if value.isdigit():
+        return int(value)
+    if value.replace('.', '', 1).isdigit():
+        return float(value)
+    try:
+        return datetime.fromisoformat(value)
+    except ValueError:
+        pass
+    return value.strip("'")
+
+
+def prepare_odata_filter(
+    filter_expr: str, parameter: str | None = None, prefix: str = ''
+) -> Tuple[str, dict]:
+    """Parses an OData-like filter expression and converts it into a SQL-like
+    query with parameters. Returns (None, None) when filter_expr is empty."""
+    if not filter_expr:
+        return None, None
+
+    pattern = re.compile(
+        r'(\w+)\s+(eq|lte|gte|gt|lt|contains|in)\s+(\'[^\']*\'|"[^"]*"|\[[^\]]*\]|[^$()\s]+?)(?=\s*(?:\$and|\$or|\)|\s*$))'
+    )
+
+    ops = {
+        'eq': '=',
+        'gt': '>',
+        'lt': '<',
+        'lte': '<=',
+        'gte': '>=',
+        'contains': 'LIKE',
+        'in': 'IN',
+    }
+
+    # Replace AND / OR operators with SQL equivalents
+    sql_expr = filter_expr.replace('$and', 'AND').replace('$or', 'OR')
+    matches = pattern.findall(filter_expr)
+
+    if not matches:
+        logger.error(f'Invalid filter {filter_expr}')
+        raise ValueError('Invalid filter expression')
+
+    params = {}
+    param_count = {}
+
+    to_replace: list[Tuple[str, str]] = []
+    for field, operator, value in matches:
+        if operator not in ops:
+            logger.error(f'Unsupported operator {operator}')
+            raise ValueError(f'Unsupported operator: {operator}')
+
+        if field in param_count:
+            param_count[field] += 1
+            param_key = f'{prefix}{field}_{param_count[field]}'
+        else:
+            param_count[field] = 0
+            param_key = f'{prefix}{field}'
+
+        dynamic_var_char = (
+            parameter if parameter else ('@' if cloud_provider == 'gcp' else ':')
+        )
+
+        if operator == 'contains':
+            parsed_value = parse_value(value)
+            params[param_key] = f'%{parsed_value}%'
+            new_expr = f'{field} {ops[operator]} {dynamic_var_char}{param_key}'
+
+        elif operator == 'in':
+            items = value.strip('[]').split(',')
+            parsed_value = [v.strip().strip('\'"') for v in items]
+
+            placeholder_keys = []
+            for idx, val in enumerate(parsed_value):
+                item_key = f'{param_key}_{idx}'
+                params[item_key] = val
+                placeholder_keys.append(f'{dynamic_var_char}{item_key}')
+            new_expr = f"{field} IN ({', '.join(placeholder_keys)})"
+        else:
+            parsed_value = parse_value(value)
+            params[param_key] = parsed_value
+            new_expr = f'{field} {ops[operator]} {dynamic_var_char}{param_key}'
+
+        old_expr = f'{field} {operator} {value}'
+        to_replace.append((old_expr, new_expr))
+
+    sorted_matches = sorted(to_replace, key=lambda x: len(x[0]), reverse=True)
+    for old_expr, new_expr in sorted_matches:
+        sql_expr = sql_expr.replace(old_expr, new_expr)
+
+    return sql_expr, params
+
+
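+# A quick round trip, for illustration (values and the '@' placeholder style
+# assume CLOUD_PROVIDER == 'gcp'):
+#
+#     sql, params = prepare_odata_filter("age gt 25 $and status eq 'active'")
+#     # sql    == 'age > @age AND status = @status'
+#     # params == {'age': 25, 'status': 'active'}
+#     fill_odata_query(sql, params)
+#     # -> "age > 25 AND status = 'active'"
+
+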
+def fill_odata_query(sql_expr: str, parameters: dict | None = None) -> str:
+    # Avoid a mutable default argument; None simply means nothing to fill in
+    parameters = parameters or {}
+    output_sql = sql_expr
+    dynamic_var_char = '@' if cloud_provider == 'gcp' else ':'
+    param_names = sorted(parameters.keys(), key=len, reverse=True)
+    for parameter in param_names:
+        if isinstance(parameters[parameter], str):
+            output_sql = output_sql.replace(
+                f'{dynamic_var_char}{parameter}', f"'{parameters[parameter]}'"
+            )
+        elif isinstance(parameters[parameter], (int, float)):
+            output_sql = output_sql.replace(
+                f'{dynamic_var_char}{parameter}', str(parameters[parameter])
+            )
+        else:
+            # e.g. datetime values are left as unfilled placeholders
+            logger.warning(
+                f'Unsupported parameter type for {parameter}: {type(parameters[parameter])}'
+            )
+    return output_sql
diff --git a/wavefront/server/modules/common_module/common_module/utils/serializer.py b/wavefront/server/modules/common_module/common_module/utils/serializer.py
new file mode 100644
index 00000000..79406aef
--- /dev/null
+++ b/wavefront/server/modules/common_module/common_module/utils/serializer.py
@@ -0,0 +1,33 @@
+from datetime import datetime, date
+import uuid
+
+
+def serialize_values(input):
+    """Recursively convert UUIDs and datetimes to JSON-friendly strings."""
+    # Handle non-dict inputs
+    if not isinstance(input, dict):
+        if isinstance(input, uuid.UUID):
+            return str(input)
+        elif isinstance(input, (datetime, date)):
+            return input.isoformat()
+        elif isinstance(input, list):
+            return [serialize_values(item) for item in input]
+        elif hasattr(input, '_asdict'):
+            return serialize_values(input._asdict())
+        else:
+            return input
+
+    result = {}
+    for column in input.keys():
+        value = input.get(column, None)
+        if isinstance(value, uuid.UUID):
+            result[column] = str(value)
+        elif isinstance(value, (datetime, date)):
+            result[column] = value.isoformat()
+        elif isinstance(value, dict):
+            result[column] = serialize_values(value)
+        elif isinstance(value, list):
+            result[column] = [serialize_values(item) for item in value]
+        else:
+            result[column] = value
+
+    return result
diff --git a/wavefront/server/modules/common_module/pyproject.toml b/wavefront/server/modules/common_module/pyproject.toml
new file mode 100644
index 00000000..e2be91a8
--- /dev/null
+++ b/wavefront/server/modules/common_module/pyproject.toml
@@ -0,0 +1,35 @@
+[project]
+name = "common-module"
+version = "0.1.0"
+description = ""
+authors = [
+    { name = "rootflo engineering", email = "engineering@rootflo.ai" }
+]
+requires-python = ">=3.11"
+
+dependencies = [
+    "fastapi>=0.115.2,<1.0.0",
+    "loguru>=0.7.2,<1.0.0",
+    "dependency-injector>=4.42.0,<5.0.0",
+    "apscheduler>=3.11.0,<4.0.0",
+    "redis>=5.2.1,<6.0.0",
+    "prometheus-client>=0.22.1,<1.0.0"
+]
+
+[dependency-groups]
+dev = [
+    "pytest>=8.3.4,<9.0.0",
+    "pytest-asyncio>=0.24.0,<1.0.0",
+    "asyncpg>=0.30.0,<1.0.0",
+    "testing-postgresql>=1.3.0,<2.0.0"
+]
+
+[tool.uv]
+package = true
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["common_module"]
diff --git a/wavefront/server/modules/common_module/tests/conftest.py b/wavefront/server/modules/common_module/tests/conftest.py
new file mode 100644
index 00000000..f9eb6955
--- /dev/null
+++ b/wavefront/server/modules/common_module/tests/conftest.py
@@ -0,0 +1,59 @@
+"""
+Test configuration for common module tests.
+Sets up mock FastAPI app with middleware for testing.
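+
+Typical invocation, assuming the standard module layout:
+
+    uv run pytest tests/ -v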
+""" + +import pytest +from fastapi import FastAPI, Request +from fastapi.testclient import TestClient + +# Import your middleware +from common_module.middleware.request_id_middleware import RequestIdMiddleware + + +@pytest.fixture +def mock_app(): + """Create a minimal FastAPI app with RequestIdMiddleware for testing.""" + app = FastAPI(title='Test App') + + # Add your middleware + app.add_middleware(RequestIdMiddleware) + + # Add test endpoints + @app.get('/test') + async def test_endpoint(request: Request): + """Test endpoint that returns request info.""" + request_id = getattr(request.state, 'request_id', 'NOT_FOUND') + return { + 'message': 'Test successful', + 'request_id_in_state': request_id, + } + + @app.get('/metrics') + async def metrics_endpoint(): + """Mock metrics endpoint similar to /v1/_metrics.""" + return {'metrics': 'mock_data'} + + @app.get('/error') + async def error_endpoint(): + """Endpoint that raises an error for testing error handling.""" + raise Exception('Test error') + + return app + + +@pytest.fixture +def client(mock_app): + """Create TestClient with the mock app.""" + return TestClient(mock_app) + + +@pytest.fixture +def mock_request(): + """Create a mock request object for unit testing.""" + from unittest.mock import Mock + + mock_req = Mock() + mock_req.headers = {} + mock_req.state = Mock() + return mock_req diff --git a/wavefront/server/modules/common_module/tests/test_odata_parser.py b/wavefront/server/modules/common_module/tests/test_odata_parser.py new file mode 100644 index 00000000..bf4e6e0f --- /dev/null +++ b/wavefront/server/modules/common_module/tests/test_odata_parser.py @@ -0,0 +1,184 @@ +from datetime import datetime +import os + +from common_module.utils.odata_parser import fill_odata_query +from common_module.utils.odata_parser import prepare_odata_filter +import pytest + +# Set cloud provider for testing +os.environ['CLOUD_PROVIDER'] = 'gcp' + + +def test_basic_equality_filter(): + filter_expr = "name eq 'John'" + expected_sql = 'name = @name' + expected_params = {'name': 'John'} + sql_expr, params = prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_equality_filter_with_quotes(): + filter_expr = "branch eq 'Agar - (MP) 5323'" + expected_sql = 'branch = @branch' + expected_params = {'branch': 'Agar - (MP) 5323'} + sql_expr, params = prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_numeric_comparison(): + filter_expr = 'age gt 25' + expected_sql = 'age > @age' + expected_params = {'age': 25} + sql_expr, params = prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_date_comparison(): + filter_expr = 'created_at gt 2024-01-01T00:00:00' + expected_sql = 'created_at > @created_at' + expected_params = {'created_at': datetime(2024, 1, 1, 0, 0)} + sql_expr, params = prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_contains_operator(): + filter_expr = "description contains 'test'" + expected_sql = 'description LIKE @description' + expected_params = {'description': '%test%'} + sql_expr, params = prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_in_operator(): + filter_expr = "status in ['active', 'pending']" + expected_sql = 'status IN (@status_0, @status_1)' + expected_params = {'status_0': 'active', 'status_1': 
'pending'} + sql_expr, params = prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_complex_and_condition(): + filter_expr = "age gt 25 $and status eq 'active'" + expected_sql = 'age > @age AND status = @status' + expected_params = {'age': 25, 'status': 'active'} + sql_expr, params = prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_complex_or_condition(): + filter_expr = "status eq 'active' $or status eq 'pending'" + expected_sql = 'status = @status OR status = @status_1' + expected_params = {'status': 'active', 'status_1': 'pending'} + sql_expr, params = prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_complex_or_condition_with_quotes(): + filter_expr = "(branch eq 'Agar - (MP) 5323' $or created_at gt 2025-05-04T05:59:56)" + expected_sql = '(branch = @branch OR created_at > @created_at)' + expected_params = { + 'branch': 'Agar - (MP) 5323', + 'created_at': datetime(2025, 5, 4, 5, 59, 56), + } + sql_expr, params = prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_custom_parameter_prefix(): + filter_expr = "name eq 'John'" + expected_sql = 'name = :name' + expected_params = {'name': 'John'} + sql_expr, params = prepare_odata_filter(filter_expr, parameter=':') + assert sql_expr == expected_sql + assert params == expected_params + + +def test_empty_filter(): + sql_expr, params = prepare_odata_filter('') + assert sql_expr is None + assert params is None + + +def test_invalid_operator(): + with pytest.raises(ValueError, match='Invalid filter expression'): + prepare_odata_filter("name invalid_op 'John'") + + +def test_invalid_filter_format(): + with pytest.raises(ValueError, match='Invalid filter expression'): + prepare_odata_filter('invalid filter format') + + +def test_multiple_conditions_with_same_field(): + filter_expr = "status eq 'active' $and status eq 'pending'" + expected_sql = 'status = @status AND status = @status_1' + expected_params = {'status': 'active', 'status_1': 'pending'} + sql_expr, params = prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_less_than_or_equal(): + filter_expr = 'age lte 30' + expected_sql = 'age <= @age' + expected_params = {'age': 30} + sql_expr, params = prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_greater_than_or_equal(): + filter_expr = 'age gte 18' + expected_sql = 'age >= @age' + expected_params = {'age': 18} + sql_expr, params = prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_greater_than_or_equal_filling(): + filter_expr = 'age gte 18' + expected_sql = 'age >= 18' + sql_expr, params = prepare_odata_filter(filter_expr) + fill_odata = fill_odata_query(sql_expr, params) + assert fill_odata == expected_sql + + +def test_multiple_conditions_with_same_field_filling(): + filter_expr = "status eq 'active' $and status eq 'pending'" + expected_sql = "status = 'active' AND status = 'pending'" + sql_expr, params = prepare_odata_filter(filter_expr) + fill_odata = fill_odata_query(sql_expr, params) + assert fill_odata == expected_sql + + +def test_multiple_conditions_with_loan_amt(): + filter_expr = "created_at gt 2025-07-23T07:42:44 $and (loan_id contains '96444' $or branch contains '96444' $or 
region contains '96444' $or zone contains '96444' $or loan_amount eq '96444')"
+    expected_sql = "created_at > @created_at AND (loan_id LIKE '%96444%' OR branch LIKE '%96444%' OR region LIKE '%96444%' OR zone LIKE '%96444%' OR loan_amount = '96444')"
+    sql_expr, params = prepare_odata_filter(filter_expr)
+    fill_odata = fill_odata_query(sql_expr, params)
+    assert fill_odata == expected_sql
+
+
+def test_multiple_conditions_with_contains():
+    filter_expr = '(loan_amount gt 50000 $and loan_amount lt 100000 $or loan_amount gt 100000 $and loan_amount lt 250000 $or loan_amount gt 250000 $and loan_amount lt 500000 $or loan_amount gt 500000) $and created_at gt 2025-07-23T08:03:37'
+    expected_sql = '(loan_amount > @loan_amount AND loan_amount < @loan_amount_1 OR loan_amount > @loan_amount_2 AND loan_amount < @loan_amount_3 OR loan_amount > @loan_amount_4 AND loan_amount < @loan_amount_5 OR loan_amount > @loan_amount_6) AND created_at > @created_at'
+    sql_expr, _ = prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+
+
+def test_multiple_conditions_with_float():
+    filter_expr = '(gold_purity gt 91.67) $and created_at gt 2025-07-23T10:07:03'
+    expected_sql = '(gold_purity > @gold_purity) AND created_at > @created_at'
+    sql_expr, params = prepare_odata_filter(filter_expr)
+    assert params['gold_purity'] == 91.67
+    assert sql_expr == expected_sql
diff --git a/wavefront/server/modules/common_module/tests/test_request_id_middleware.py b/wavefront/server/modules/common_module/tests/test_request_id_middleware.py
new file mode 100644
index 00000000..f88f63e4
--- /dev/null
+++ b/wavefront/server/modules/common_module/tests/test_request_id_middleware.py
@@ -0,0 +1,243 @@
+import pytest
+from fastapi.testclient import TestClient
+from common_module.middleware.request_id_middleware import RequestIdMiddleware
+
+
+class TestRequestIdMiddlewareUnit:
+    """Unit tests for individual middleware methods."""
+
+    def test_validate_request_id_valid_cases(self):
+        """Test request ID validation for valid cases."""
+        valid_ids = [
+            'fe-abc12345',  # minimum fe
+            'be-xyz67890',  # minimum be
+            'fe-AbC123dEf',  # mixed case
+            'be-123456789012',  # maximum length (12 chars)
+            'fe-a1b2c3d4',  # mixed alphanumeric
+        ]
+
+        for request_id in valid_ids:
+            assert RequestIdMiddleware.validate_request_id(
+                request_id
+            ), f'Should be valid: {request_id}'
+
+    def test_validate_request_id_invalid_cases(self):
+        """Test request ID validation for invalid cases."""
+        invalid_ids = [
+            'invalid-id',  # wrong prefix
+            'fe-abc123',  # too short (6 chars)
+            'be-abc1234567890',  # too long (13 chars)
+            'fe-abc123!@',  # invalid characters
+            'xx-abc12345',  # invalid prefix
+            '',  # empty
+            'fe-',  # no random part
+            'abc12345',  # no prefix
+            'FE-abc12345',  # uppercase prefix
+            'fe_abc12345',  # underscore instead of dash
+        ]
+
+        for request_id in invalid_ids:
+            assert not RequestIdMiddleware.validate_request_id(
+                request_id
+            ), f'Should be invalid: {request_id}'
+
+    def test_generate_request_id_format(self):
+        """Test that generated IDs have correct format."""
+        # Test backend prefix
+        be_id = RequestIdMiddleware.generate_request_id('be')
+        assert be_id.startswith('be-')
+        assert 11 <= len(be_id) <= 15  # be- + 8-12 chars
+        assert RequestIdMiddleware.validate_request_id(be_id)
+
+        # Test frontend prefix
+        fe_id = RequestIdMiddleware.generate_request_id('fe')
+        assert fe_id.startswith('fe-')
+        assert 11 <= len(fe_id) <= 15  # fe- + 8-12 chars
+        assert RequestIdMiddleware.validate_request_id(fe_id)
+
+        # Test invalid prefix defaults to 'be'
+        default_id = RequestIdMiddleware.generate_request_id('invalid')
+        assert default_id.startswith('be-')
+
+        # Test multiple generations are unique
+        ids = set()
+        for _ in range(100):
+            new_id = RequestIdMiddleware.generate_request_id('be')
+            assert new_id not in ids, 'Generated IDs should be unique'
+            ids.add(new_id)
+
+    def test_get_request_id_from_headers_case_insensitive(self, mock_request):
+        """Test header extraction is case insensitive."""
+        test_cases = [
+            ('X-Flo-Request-ID', 'fe-standard'),
+            ('x-flo-request-id', 'fe-lower'),
+            ('X-FLO-REQUEST-ID', 'fe-upper'),
+            ('X-flo-Request-Id', 'fe-mixed'),
+            ('x-Flo-REQUEST-id', 'be-weird'),
+        ]
+
+        for header_name, expected_value in test_cases:
+            mock_request.headers = {header_name: expected_value}
+            result = RequestIdMiddleware.get_request_id_from_headers(mock_request)
+            assert (
+                result == expected_value
+            ), f'Should find {expected_value} in header {header_name}'
+
+    def test_get_request_id_from_headers_not_found(self, mock_request):
+        """Test when request ID header is not present."""
+        mock_request.headers = {
+            'Content-Type': 'application/json',
+            'Authorization': 'Bearer token',
+        }
+        result = RequestIdMiddleware.get_request_id_from_headers(mock_request)
+        assert result is None
+
+
+class TestRequestIdMiddlewareMockIntegration:
+    """Integration tests using mock FastAPI app."""
+
+    @pytest.mark.parametrize(
+        'header_name,request_id',
+        [
+            ('X-Flo-Request-ID', 'fe-abc12345'),
+            ('x-flo-request-id', 'fe-def67890'),
+            ('X-FLO-REQUEST-ID', 'be-ghi12345'),
+            ('X-flo-request-id', 'fe-jkl67890'),
+            ('x-Flo-Request-Id', 'be-mno12345'),
+            ('X-FLO-request-ID', 'fe-pqr67890'),
+            ('x-flo-REQUEST-id', 'be-stu12345'),
+        ],
+    )
+    def test_case_insensitive_headers(
+        self, client: TestClient, header_name: str, request_id: str
+    ):
+        """Test that X-Flo-Request-ID header is case insensitive."""
+        response = client.get('/test', headers={header_name: request_id})
+
+        # Check response header
+        returned_id = response.headers.get('X-Flo-Request-ID')
+        assert returned_id == request_id, f'Expected {request_id}, got {returned_id}'
+
+        # Check that middleware stored it in request state
+        assert response.json()['request_id_in_state'] == request_id
+
+    @pytest.mark.parametrize(
+        'request_id,expected_prefix,description',
+        [
+            # Valid cases (should be preserved)
+            ('fe-abc12345', 'fe-abc12345', 'Valid frontend ID (8 chars)'),
+            ('be-xyz123456789', 'be-xyz123456789', 'Valid backend ID (12 chars)'),
+            ('fe-AbC123dEf', 'fe-AbC123dEf', 'Valid mixed case alphanumeric'),
+            # Invalid cases (should generate new be-* ID)
+            ('invalid-id', 'be-', 'Invalid format - no prefix'),
+            ('fe-abc123', 'be-', 'Too short (6 chars)'),
+            ('be-abc1234567890', 'be-', 'Too long (13 chars)'),
+            ('fe-abc123!@', 'be-', 'Invalid characters'),
+            ('xx-abc12345', 'be-', 'Invalid prefix'),
+            ('', 'be-', 'Empty ID'),
+        ],
+    )
+    def test_request_id_validation(
+        self,
+        client: TestClient,
+        request_id: str,
+        expected_prefix: str,
+        description: str,
+    ):
+        """Test request ID validation and generation."""
+        headers = {'X-Flo-Request-ID': request_id} if request_id else {}
+        response = client.get('/test', headers=headers)
+
+        returned_id = response.headers.get('X-Flo-Request-ID')
+        assert (
+            returned_id is not None
+        ), 'Response should always contain X-Flo-Request-ID header'
+
+        if expected_prefix == returned_id:
+            # Exact match expected
+            assert returned_id == expected_prefix
+        elif expected_prefix.endswith('-'):
+            # Should generate new ID with expected prefix
+            assert returned_id.startswith(
+ expected_prefix + ), f"Expected ID to start with '{expected_prefix}', got '{returned_id}'" + assert len(returned_id) >= 11, f'Generated ID too short: {returned_id}' + else: + # Valid ID should be preserved + assert returned_id == expected_prefix + + def test_no_request_id_header(self, client: TestClient): + """Test behavior when no X-Flo-Request-ID header is provided.""" + response = client.get('/test') + + returned_id = response.headers.get('X-Flo-Request-ID') + assert ( + returned_id is not None + ), 'Response should always contain X-Flo-Request-ID header' + assert returned_id.startswith( + 'be-' + ), f"Generated ID should start with 'be-', got: {returned_id}" + assert len(returned_id) >= 11, f'Generated ID too short: {returned_id}' + + # Check it's stored in request state + assert response.json()['request_id_in_state'] == returned_id + + @pytest.mark.parametrize( + 'input_headers', + [ + {'X-Flo-Request-ID': 'fe-test12345'}, + {'x-flo-request-id': 'be-test67890'}, + {}, # no header + ], + ) + def test_response_headers_always_present( + self, client: TestClient, input_headers: dict + ): + """Test that response headers always contain X-Flo-Request-ID.""" + response = client.get('/metrics', headers=input_headers) + + returned_id = response.headers.get('X-Flo-Request-ID') + assert ( + returned_id is not None + ), 'Response should always contain X-Flo-Request-ID header' + assert returned_id.startswith( + ('fe-', 'be-') + ), f'Invalid ID prefix: {returned_id}' + assert len(returned_id) >= 11, f'ID too short: {returned_id}' + + def test_middleware_state_persistence(self, client: TestClient): + """Test that request ID persists in request state throughout request lifecycle.""" + response = client.get('/test', headers={'X-Flo-Request-ID': 'fe-persist123'}) + + # Verify the middleware stored the request ID in request.state + assert response.json()['request_id_in_state'] == 'fe-persist123' + # Verify the middleware added it to response headers + assert response.headers.get('X-Flo-Request-ID') == 'fe-persist123' + + def test_request_id_format_edge_cases(self, client: TestClient): + """Test specific format requirements for request IDs.""" + test_cases = [ + ('fe-abcd1234', True, 'Minimum valid length'), # 8 chars + ('be-abcd12345678', True, 'Maximum valid length'), # 12 chars + ('fe-abc1234a', True, 'Mixed alphanumeric'), + ('be-12345678', True, 'All numbers'), + ('fe-abcdefgh', True, 'All letters'), + ] + + for test_id, should_be_valid, description in test_cases: + response = client.get('/test', headers={'X-Flo-Request-ID': test_id}) + returned_id = response.headers.get('X-Flo-Request-ID') + + if should_be_valid: + assert ( + returned_id == test_id + ), f'{description}: {test_id} should be preserved' + else: + assert ( + returned_id != test_id + ), f'{description}: {test_id} should be rejected' + assert returned_id.startswith('be-'), 'Should generate new be- ID' + + +if __name__ == '__main__': + pytest.main([__file__, '-v']) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic.ini b/wavefront/server/modules/db_repo_module/db_repo_module/alembic.ini new file mode 100644 index 00000000..47a89580 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic.ini @@ -0,0 +1,82 @@ +# A generic, single database configuration. 
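+#
+# Note: sqlalchemy.url is left as the %(db_url)s placeholder below; the
+# effective URL is assembled at runtime in alembic/env.py from the
+# DB_USERNAME, DB_PASSWORD, DB_HOST, DB_PORT and DB_NAME environment variables.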
+ +[alembic] +# path to migration scripts +# Use forward slashes (/) also on windows to provide an os agnostic path +script_location = %(here)s/alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. +# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +# version_path_separator = newline +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = %(db_url)s + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/env.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/env.py new file mode 100644 index 00000000..0d853c75 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/env.py @@ -0,0 +1,147 @@ +import os + +from alembic import context +from db_repo_module.database.base import Base +from db_repo_module.models.documents import Document +from db_repo_module.models.email import Email +from db_repo_module.models.kb_inferences import KnowledgeBaseInferences +from db_repo_module.models.knowledge_base_documents import KnowledgeBaseDocuments +from db_repo_module.models.knowledge_base_embeddings import KnowledgeBaseEmbeddings +from db_repo_module.models.knowledge_bases import KnowledgeBase +from db_repo_module.models.notification_users import NotificationUser +from db_repo_module.models.notifications import Notification +from db_repo_module.models.oauth_credential import OAuthCredential +from db_repo_module.models.resource import Resource +from db_repo_module.models.role import Role +from db_repo_module.models.role_resource import RoleResource +from db_repo_module.models.saml_config import SAMLConfig +from db_repo_module.models.session import Session +from db_repo_module.models.task import Task +from db_repo_module.models.team import Team +from db_repo_module.models.user import User +from db_repo_module.models.user_role import UserRole +from db_repo_module.models.datasource import Datasource +from db_repo_module.models.model_schema import ModelSchema +from db_repo_module.models.llm_inference_config import LlmInferenceConfig +from db_repo_module.models.image_search_models import ( + ReferenceImageFeatures, + SIFTFeatures, +) +from db_repo_module.models.ikb_models import ImageKnowledgeBase +from db_repo_module.models.telephony_config import TelephonyConfig +from db_repo_module.models.tts_config import TtsConfig +from db_repo_module.models.stt_config import SttConfig +from db_repo_module.models.voice_agent import VoiceAgent +from db_repo_module.models.message_processors import MessageProcessors +from dotenv import load_dotenv +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. 
+config = context.config + + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +models = [ + Document, + Email, + OAuthCredential, + Role, + SAMLConfig, + Task, + Team, + Session, + User, + Notification, + NotificationUser, + Resource, + RoleResource, + UserRole, + KnowledgeBase, + KnowledgeBaseDocuments, + KnowledgeBaseEmbeddings, + KnowledgeBaseInferences, + Datasource, + ModelSchema, + LlmInferenceConfig, + ReferenceImageFeatures, + SIFTFeatures, + ImageKnowledgeBase, + TelephonyConfig, + TtsConfig, + SttConfig, + VoiceAgent, + MessageProcessors, +] +target_metadata = Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + +load_dotenv() +db_user_name = os.getenv('DB_USERNAME') +db_password = os.getenv('DB_PASSWORD') +db_host = os.getenv('DB_HOST') +db_port = os.getenv('DB_PORT') +db_name = os.getenv('DB_NAME') + +db_url = f'postgresql://{db_user_name}:{db_password}@{db_host}:{db_port}/{db_name}' + +config.set_main_option('sqlalchemy.url', db_url) + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + + url = config.get_main_option('sqlalchemy.url') + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={'paramstyle': 'named'}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix='sqlalchemy.', + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/script.py.mako b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/script.py.mako new file mode 100644 index 00000000..fbc4b07d --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_04_1326-f6b7ce8e5b03_create_a_baseline_migrations.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_04_1326-f6b7ce8e5b03_create_a_baseline_migrations.py new file mode 100644 index 00000000..56f2426c --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_04_1326-f6b7ce8e5b03_create_a_baseline_migrations.py @@ -0,0 +1,138 @@ +"""Create a baseline migrations + +Revision ID: f6b7ce8e5b03 +Revises: +Create Date: 2024-12-04 13:26:56.138406 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision: str = 'f6b7ce8e5b03' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + 'document', + sa.Column('document_id', sa.String(), nullable=False), + sa.Column('document_name', sa.String(), nullable=False), + sa.Column('last_update_timestamp', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('document_id'), + ) + op.create_index( + op.f('ix_document_document_id'), 'document', ['document_id'], unique=False + ) + op.create_table( + 'email', + sa.Column('id', sa.String(), nullable=False), + sa.Column('thread_id', sa.String(), nullable=False), + sa.Column('account_id', sa.String(), nullable=False), + sa.Column('content', sa.String(), nullable=False), + sa.Column('synced_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index(op.f('ix_email_id'), 'email', ['id'], unique=False) + op.create_table( + 'oauth_credential', + sa.Column('id', sa.String(), nullable=False), + sa.Column('email', sa.String(), nullable=False), + sa.Column('provider', sa.String(), nullable=False), + sa.Column('access_token', sa.String(), nullable=False), + sa.Column('refresh_token', sa.String(), nullable=False), + sa.Column('token_uri', sa.String(), nullable=True), + sa.Column('client_id', sa.String(), nullable=True), + sa.Column('client_secret', sa.String(), nullable=True), + sa.Column('scopes', sa.JSON(), nullable=False), + sa.Column('expiry', sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index( + op.f('ix_oauth_credential_id'), 'oauth_credential', ['id'], unique=False + ) + op.create_table( + 'role', + sa.Column('id', sa.String(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index(op.f('ix_role_id'), 'role', ['id'], unique=False) + op.create_table( + 'saml_config', + sa.Column('id', sa.UUID(), nullable=False), + sa.Column('entity_id', sa.String(), nullable=False), + sa.Column('sso_url', sa.String(), nullable=False), + sa.Column('slo_url', sa.String(), nullable=True), + sa.Column('x509_certificate', sa.String(), nullable=False), + sa.Column('name_id_format', sa.String(), nullable=True), + 
sa.Column('metadata_xml', sa.String(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=True), + sa.Column('updated_at', sa.DateTime(), nullable=True), + sa.Column('is_active', sa.Boolean(), nullable=True), + sa.Column('created_by', sa.UUID(), nullable=True), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index( + op.f('ix_saml_config_is_active'), 'saml_config', ['is_active'], unique=False + ) + op.create_table( + 'task', + sa.Column('message_id', sa.String(), nullable=False), + sa.Column('thread_id', sa.String(), nullable=False), + sa.Column('account_id', sa.String(), nullable=False), + sa.Column('sender', sa.String(), nullable=False), + sa.Column('title', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=False), + sa.Column('priority', sa.String(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('message_id'), + ) + op.create_table( + 'team', + sa.Column('id', sa.Uuid(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index(op.f('ix_team_id'), 'team', ['id'], unique=False) + op.create_table( + 'user', + sa.Column('id', sa.Uuid(), nullable=False), + sa.Column('email', sa.String(), nullable=True), + sa.Column('password', sa.String(), nullable=True), + sa.Column('first_name', sa.String(), nullable=False), + sa.Column('last_name', sa.String(), nullable=False), + sa.Column('team_id', sa.String(), nullable=True), + sa.Column('role_id', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f('ix_user_email'), table_name='user') + op.drop_table('user') + op.drop_index(op.f('ix_team_id'), table_name='team') + op.drop_table('team') + op.drop_table('task') + op.drop_index(op.f('ix_saml_config_is_active'), table_name='saml_config') + op.drop_table('saml_config') + op.drop_index(op.f('ix_role_id'), table_name='role') + op.drop_table('role') + op.drop_index(op.f('ix_oauth_credential_id'), table_name='oauth_credential') + op.drop_table('oauth_credential') + op.drop_index(op.f('ix_email_id'), table_name='email') + op.drop_table('email') + op.drop_index(op.f('ix_document_document_id'), table_name='document') + op.drop_table('document') + # ### end Alembic commands ### diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_04_1327-17c4ba1a32fe_initializing_the_role_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_04_1327-17c4ba1a32fe_initializing_the_role_table.py new file mode 100644 index 00000000..ddc75bde --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_04_1327-17c4ba1a32fe_initializing_the_role_table.py @@ -0,0 +1,98 @@ +"""initializing the role table + +Revision ID: 17c4ba1a32fe +Revises: f6b7ce8e5b03 +Create Date: 2024-12-04 13:27:39.664369 + +""" + +import os +from typing import Sequence, Union +import uuid + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. 
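+# Note: the upgrade below seeds the role table and an initial admin user; the
+# user's credentials come from the EMAIL, PASSWORD, FIRST_NAME and LAST_NAME
+# environment variables, which must be set when this migration runs.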
+revision: str = '17c4ba1a32fe' +down_revision: Union[str, None] = 'f6b7ce8e5b03' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) + + try: + if 'role' in inspector.get_table_names(): + admin_id = uuid.uuid4() + conn.execute( + sa.text( + """ + INSERT INTO role (id, name, description) + VALUES (:id, :name, :description) + """ + ), + [ + { + 'id': admin_id, + 'name': 'admin', + 'description': 'Admin role with full permissions', + }, + { + 'id': uuid.uuid4(), + 'name': 'read', + 'description': 'read role with limited permission', + }, + { + 'id': uuid.uuid4(), + 'name': 'read_write', + 'description': 'read and write role with moderate permission', + }, + ], + ) + email = os.getenv('EMAIL') + password = os.getenv('PASSWORD') + f_name = os.getenv('FIRST_NAME') + l_name = os.getenv('LAST_NAME') + conn.execute( + sa.text( + """ + INSERT INTO "user" (id, email, password, first_name, last_name, team_id, role_id) + VALUES (:id, :email, :password, :first_name, :last_name, :team_id, :role_id) + """ + ), + { + 'id': uuid.uuid4(), + 'email': email, + 'password': password, + 'first_name': f_name, + 'last_name': l_name, + 'team_id': 'team1', + 'role_id': admin_id, + }, + ) + + except Exception as e: + raise e + + +def downgrade() -> None: + op.execute( + sa.text( + """ + DELETE FROM "user" + WHERE email = 'sanosh@example.com'; + """ + ) + ) + op.execute( + sa.text( + """ + DELETE FROM "role" + WHERE name='admin'; + """ + ) + ) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_09_1030-756caddfb44b_truncating_role_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_09_1030-756caddfb44b_truncating_role_table.py new file mode 100644 index 00000000..46bf2b9d --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_09_1030-756caddfb44b_truncating_role_table.py @@ -0,0 +1,58 @@ +"""truncating role table + +Revision ID: 756caddfb44b +Revises: 17c4ba1a32fe +Create Date: 2024-12-09 10:30:19.749358 + +""" + +import os +from typing import Sequence, Union +import uuid + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
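+# Note: this revision truncates the "user" table and reseeds the admin user
+# from the EMAIL, PASSWORD, FIRST_NAME and LAST_NAME environment variables.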
+revision: str = '756caddfb44b' +down_revision: Union[str, None] = '17c4ba1a32fe' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + + try: + conn.execute(sa.text("""TRUNCATE TABLE "user" """)) + result = conn.execute(sa.text("SELECT id FROM role WHERE name = 'admin'")) + admin_id = result.scalar() + if not admin_id: + raise ValueError('Admin role not found in the role table.') + email = os.getenv('EMAIL') + password = os.getenv('PASSWORD') + f_name = os.getenv('FIRST_NAME') + l_name = os.getenv('LAST_NAME') + conn.execute( + sa.text( + """ + INSERT INTO "user" (id, email, password, first_name, last_name, team_id, role_id) + VALUES (:id, :email, :password, :first_name, :last_name, :team_id, :role_id) + """ + ), + { + 'id': uuid.uuid4(), + 'email': email, + 'password': password, + 'first_name': f_name, + 'last_name': l_name, + 'team_id': 'team-1', + 'role_id': admin_id, + }, + ) + except Exception as e: + raise e + + +def downgrade() -> None: + pass diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_17_1412-01a4c5202566_hash_existing_passwords.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_17_1412-01a4c5202566_hash_existing_passwords.py new file mode 100644 index 00000000..b1b222f1 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2024_12_17_1412-01a4c5202566_hash_existing_passwords.py @@ -0,0 +1,45 @@ +"""hash_existing_passwords + +Revision ID: 01a4c5202566 +Revises: 756caddfb44b +Create Date: 2024-12-17 14:12:08.397545 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from user_management_module.utils.password_utils import hash_password + +# revision identifiers, used by Alembic. +revision: str = '01a4c5202566' +down_revision: Union[str, None] = '756caddfb44b' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + + result = conn.execute( + sa.text('SELECT id, password FROM "user" WHERE password IS NOT NULL') + ) + users = result.fetchall() + + for user in users: + user_id, plain_password = user + + if plain_password.startswith('$2b$'): + continue + + hashed_password = hash_password(plain_password) + + conn.execute( + sa.text('UPDATE "user" SET password = :password WHERE id = :id'), + {'password': hashed_password, 'id': user_id}, + ) + + +def downgrade() -> None: + pass diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_10_1133-c7800bd1d9c3_for_actionable_insights.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_10_1133-c7800bd1d9c3_for_actionable_insights.py new file mode 100644 index 00000000..f906b47b --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_10_1133-c7800bd1d9c3_for_actionable_insights.py @@ -0,0 +1,49 @@ +"""for actionable insights + +Revision ID: c7800bd1d9c3 +Revises: 01a4c5202566 +Create Date: 2025-02-10 11:33:33.664976 + +""" + +from typing import Sequence, Union +import uuid + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.ext.mutable import MutableDict + +# revision identifiers, used by Alembic. 
+revision: str = 'c7800bd1d9c3' +down_revision: Union[str, None] = '01a4c5202566' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + 'actionable_alerts', + sa.Column('id', UUID(as_uuid=True), primary_key=True, default=uuid.uuid4), + sa.Column('signal_id', sa.String, nullable=False), + sa.Column('title', sa.String, nullable=True), + sa.Column('description', sa.String, nullable=True), + sa.Column('signal_type', sa.String, nullable=False), + sa.Column('alerts', MutableDict.as_mutable(JSONB), nullable=True), + sa.Column('data', MutableDict.as_mutable(JSONB), nullable=True), + sa.Column( + 'created_at', sa.DateTime(), nullable=False, server_default=sa.func.now() + ), + sa.Column( + 'updated_at', + sa.DateTime(), + nullable=False, + server_default=sa.func.now(), + onupdate=sa.func.now(), + ), + ) + + +def downgrade() -> None: + op.drop_table('actionable_alerts') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_18_1751-76ba9543af92_create_signal_name_in_actionable_alerts.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_18_1751-76ba9543af92_create_signal_name_in_actionable_alerts.py new file mode 100644 index 00000000..a54f917e --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_18_1751-76ba9543af92_create_signal_name_in_actionable_alerts.py @@ -0,0 +1,28 @@ +"""create signal name in actionable alerts + +Revision ID: 76ba9543af92 +Revises: c7800bd1d9c3 +Create Date: 2025-02-18 17:51:32.463298 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision: str = '76ba9543af92' +down_revision: Union[str, None] = 'c7800bd1d9c3' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.add_column( + 'actionable_alerts', sa.Column('signal_name', sa.String, nullable=True) + ) + + +def downgrade() -> None: + op.drop_column('actionable_alerts', 'signal_name') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_22_1236-f9c4c1c48d46_remove_all_wrongly_generated_alerts.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_22_1236-f9c4c1c48d46_remove_all_wrongly_generated_alerts.py new file mode 100644 index 00000000..0507f025 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_22_1236-f9c4c1c48d46_remove_all_wrongly_generated_alerts.py @@ -0,0 +1,43 @@ +"""Remove all wrongly generated alerts + +Revision ID: f9c4c1c48d46 +Revises: 76ba9543af92 +Create Date: 2025-02-22 12:36:16.510535 + +""" + +from typing import Sequence, Union + +from alembic import op +from sqlalchemy.orm import Session +from sqlalchemy.sql import text + +# revision identifiers, used by Alembic. 
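+
+# The delete in this revision binds a Python tuple to a single :ids
+# parameter, which relies on the DBAPI (psycopg2) expanding the tuple into a
+# parenthesised list. The driver-agnostic SQLAlchemy form uses an expanding
+# bind parameter instead; a sketch (illustrative only, never called here):
+def _delete_all_except(session, ids_to_keep):
+    from sqlalchemy import bindparam
+    from sqlalchemy.sql import text
+
+    stmt = text('DELETE FROM actionable_alerts WHERE id NOT IN :ids').bindparams(
+        bindparam('ids', expanding=True)
+    )
+    session.execute(stmt, {'ids': list(ids_to_keep)})
+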
+revision: str = 'f9c4c1c48d46' +down_revision: Union[str, None] = '76ba9543af92' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + bind = op.get_bind() + session = Session(bind=bind) + + latest_20_rows = session.execute( + text('SELECT id FROM actionable_alerts ORDER BY created_at DESC LIMIT 20') + ).fetchall() + + if latest_20_rows: + # Extract the 20 IDs to keep + ids_to_keep = tuple(row[0] for row in latest_20_rows) + + # Delete all rows except these 20 + session.execute( + text('DELETE FROM actionable_alerts WHERE id NOT IN :ids'), + {'ids': ids_to_keep}, + ) + session.commit() + + +def downgrade() -> None: + pass diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_28_1602-78655faf6488_adding_notification.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_28_1602-78655faf6488_adding_notification.py new file mode 100644 index 00000000..a3edd9a1 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_28_1602-78655faf6488_adding_notification.py @@ -0,0 +1,56 @@ +"""adding notification + +Revision ID: 78655faf6488 +Revises: f9c4c1c48d46 +Create Date: 2025-02-28 16:02:31.108730 + +""" + +from typing import Sequence, Union +import uuid + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision: str = '78655faf6488' +down_revision: Union[str, None] = 'f9c4c1c48d46' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + 'notification', + sa.Column('id', sa.UUID(), primary_key=True, default=uuid.uuid4), + sa.Column('type', sa.String(), nullable=True), + sa.Column('title', sa.String(), nullable=True), + sa.Column( + 'created_at', sa.DateTime(), nullable=False, server_default=sa.func.now() + ), + sa.Column( + 'updated_at', + sa.DateTime(), + nullable=False, + server_default=sa.func.now(), + onupdate=sa.func.now(), + ), + ) + + op.create_table( + 'notification_user', + sa.Column('id', sa.UUID(), primary_key=True, default=uuid.uuid4), + sa.Column('user_id', sa.UUID(), sa.ForeignKey('user.id'), nullable=False), + sa.Column( + 'notification_id', + sa.UUID(), + sa.ForeignKey('notification.id'), + nullable=False, + ), + sa.Column('seen', sa.Boolean(), nullable=False, server_default=sa.false()), + ) + + +def downgrade() -> None: + op.drop_table('notification_user') + op.drop_table('notification') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_03_25_1855-9b10292a95eb_rbac_tables.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_03_25_1855-9b10292a95eb_rbac_tables.py new file mode 100644 index 00000000..766b2c2e --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_03_25_1855-9b10292a95eb_rbac_tables.py @@ -0,0 +1,238 @@ +"""RBAC tables + +Revision ID: 9b10292a95eb +Revises: 78655faf6488 +Create Date: 2025-03-25 18:55:15.399620 + +""" + +from typing import Sequence, Union +import uuid + +from alembic import op +from db_repo_module.models.resource import ResourceScope +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
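+
+# This revision replaces the single user.role_id column with a full RBAC
+# chain: user -> user_role -> role -> role_resource -> resource. A read-side
+# sketch of how a user's permissions resolve through those join tables
+# (hypothetical helper, not used by the migration):
+def _user_permissions(conn, user_id):
+    import sqlalchemy as sa
+
+    return conn.execute(
+        sa.text(
+            """
+            SELECT r.name, res.key, res.value
+            FROM user_role ur
+            JOIN role r ON r.id = ur.role_id
+            JOIN role_resource rr ON rr.role_id = r.id
+            JOIN resource res ON res.id = rr.resource_id
+            WHERE ur.user_id = :user_id
+            """
+        ),
+        {'user_id': user_id},
+    ).fetchall()
+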
+revision: str = '9b10292a95eb'
+down_revision: Union[str, None] = '78655faf6488'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    conn = op.get_bind()
+
+    op.execute("DELETE FROM role WHERE name != 'admin'")
+    admin_rows = conn.execute(
+        sa.text("SELECT * FROM role WHERE name = 'admin'")
+    ).fetchall()
+    if not admin_rows:
+        raise ValueError('Admin role not found; cannot seed RBAC tables.')
+    admin_role_id = admin_rows[0].id
+
+    user_rows = conn.execute(
+        sa.text('SELECT * FROM public.user WHERE role_id = :role_id'),
+        {'role_id': admin_role_id},
+    ).fetchall()
+    if not user_rows:
+        raise ValueError('No user with the admin role found; cannot seed RBAC tables.')
+    user_id = user_rows[0].id
+
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        'resource',
+        sa.Column('id', sa.String(), nullable=False),
+        sa.Column('key', sa.String(), nullable=False),
+        sa.Column('value', sa.String(), nullable=False),
+        sa.Column('description', sa.String(), nullable=True),
+        sa.Column('scope', sa.String(), nullable=False),
+        sa.Column('meta', sa.String(), nullable=True),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('key', 'value', name='key_value'),
+    )
+    op.create_index(op.f('ix_resource_id'), 'resource', ['id'], unique=False)
+    op.create_table(
+        'role_resource',
+        sa.Column('role_id', sa.String(), nullable=False),
+        sa.Column('resource_id', sa.String(), nullable=False),
+        sa.ForeignKeyConstraint(
+            ['resource_id'],
+            ['resource.id'],
+        ),
+        sa.ForeignKeyConstraint(
+            ['role_id'],
+            ['role.id'],
+        ),
+        sa.PrimaryKeyConstraint('role_id', 'resource_id'),
+    )
+    op.create_table(
+        'user_role',
+        sa.Column('user_id', sa.Uuid(), nullable=False),
+        sa.Column('role_id', sa.String(), nullable=False),
+        sa.ForeignKeyConstraint(
+            ['role_id'],
+            ['role.id'],
+        ),
+        sa.ForeignKeyConstraint(
+            ['user_id'],
+            ['user.id'],
+        ),
+        sa.PrimaryKeyConstraint('user_id', 'role_id'),
+    )
+    op.alter_column('notification', 'title', existing_type=sa.VARCHAR(), nullable=False)
+    op.alter_column('notification', 'type', existing_type=sa.VARCHAR(), nullable=False)
+    op.alter_column('role', 'description', existing_type=sa.VARCHAR(), nullable=True)
+    op.add_column('user', sa.Column('deleted', sa.Boolean(), nullable=True))
+    op.execute('UPDATE public.user SET deleted = false WHERE deleted IS NULL')
+    op.alter_column('user', 'deleted', nullable=False)
+    op.alter_column('user', 'email', existing_type=sa.VARCHAR(), nullable=False)
+    op.alter_column('user', 'password', existing_type=sa.VARCHAR(), nullable=False)
+    op.drop_index('ix_user_email', table_name='user')
+    op.create_index(op.f('ix_user_id'), 'user', ['id'], unique=False)
+    # Name the constraint explicitly so downgrade() can drop it by name; the
+    # name matches PostgreSQL's default naming for a unique constraint on email.
+    op.create_unique_constraint('user_email_key', 'user', ['email'])
+    op.drop_column('user', 'role_id')
+    op.drop_column('user', 'team_id')
+    # ### end Alembic commands ###
+
+    admin_resource_id = uuid.uuid4()
+    conn.execute(
+        sa.text(
+            """
+            INSERT INTO resource (id, key, value, description, scope)
+            VALUES (:id, :key, :value, :description, :scope)
+            """
+        ),
+        [
+            {
+                'id': admin_resource_id,
+                'key': 'console_resource',
+                'value': 'admin_resource',
+                'description': 'Admin resources for console',
+                'scope': ResourceScope.CONSOLE,
+            }
+        ],
+    )
+
+    conn.execute(
+        sa.text(
+            'INSERT INTO role_resource (role_id, resource_id) '
+            'VALUES (:role_id, :resource_id)'
+        ),
+        {'role_id': admin_role_id, 'resource_id': admin_resource_id},
+    )
+
+    conn.execute(
+        sa.text('INSERT INTO user_role (user_id, role_id) VALUES (:user_id, :role_id)'),
+        {'user_id': user_id, 'role_id': admin_role_id},
+    )
+
+    console_resources = [
+        {
+            'key': 'console_resource',
+            'value': 'manager_resource',
+            'description': 'Manager resources for console',
+            'scope': ResourceScope.CONSOLE,
+        },
+        {
+            'key': 'console_resource',
+            'value': 'editor_resource',
+            'description': 'Editor resources for console',
+            'scope': ResourceScope.CONSOLE,
+        },
+        {
+            'key': 'console_resource',
+            'value': 'viewer_resource',
+            'description': 'Viewer resources for console',
+            'scope': ResourceScope.CONSOLE,
+        },
+        {
+            'key': 'console_resource',
+            'value': 'guest_resource',
+            'description': 'Guest resources for console',
+            'scope': ResourceScope.CONSOLE,
+        },
+    ]
+
+    console_roles = [
+        {
+            'name': 'manager',
+            'description': 'Manage users and perform CRUD operations; no config management',
+        },
+        {
+            'name': 'editor',
+            'description': 'Modify content; no user or config management',
+        },
+        {
+            'name': 'viewer',
+            'description': 'Read-only access; view data, no modifications',
+        },
+        {
+            'name': 'guest',
+            'description': 'Minimal access; view public/shared content only',
+        },
+    ]
+
+    resources = []
+    roles = []
+    role_resources = []
+    for res, role in zip(console_resources, console_roles):
+        res['id'] = uuid.uuid4()
+        role['id'] = uuid.uuid4()
+        resources.append(res)
+        roles.append(role)
+        role_resources.append({'role_id': role['id'], 'resource_id': res['id']})
+
+    conn.execute(
+        sa.text(
+            """
+            INSERT INTO resource (id, key, value, description, scope)
+            VALUES (:id, :key, :value, :description, :scope)
+            """
+        ),
+        resources,
+    )
+
+    conn.execute(
+        sa.text(
+            """
+            INSERT INTO role (id, name, description)
+            VALUES (:id, :name, :description)
+            """
+        ),
+        roles,
+    )
+
+    conn.execute(
+        sa.text(
+            """
+            INSERT INTO role_resource (role_id, resource_id)
+            VALUES (:role_id, :resource_id)
+            """
+        ),
+        role_resources,
+    )
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column(
+        'user', sa.Column('team_id', sa.VARCHAR(), autoincrement=False, nullable=True)
+    )
+    op.add_column(
+        'user', sa.Column('role_id', sa.VARCHAR(), autoincrement=False, nullable=False)
+    )
+    op.drop_constraint('user_email_key', 'user', type_='unique')
+    op.drop_index(op.f('ix_user_id'), table_name='user')
+    op.create_index('ix_user_email', 'user', ['email'], unique=False)
+    op.alter_column('user', 'password', existing_type=sa.VARCHAR(), nullable=True)
+    op.alter_column('user', 'email', existing_type=sa.VARCHAR(), nullable=True)
+    op.drop_column('user', 'deleted')
+    op.alter_column('role', 'description', existing_type=sa.VARCHAR(), nullable=False)
+    op.alter_column('notification', 'type', existing_type=sa.VARCHAR(), nullable=True)
+    op.alter_column('notification', 'title', existing_type=sa.VARCHAR(), nullable=True)
+    op.drop_table('user_role')
+    op.drop_table('role_resource')
+    op.drop_index(op.f('ix_resource_id'), table_name='resource')
+    op.drop_table('resource')
+    # ### end Alembic commands ###
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_03_31_1715-36703628c7a6_adding_cacade_delete.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_03_31_1715-36703628c7a6_adding_cacade_delete.py
new file mode 100644
index 00000000..b22e9ee0
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_03_31_1715-36703628c7a6_adding_cacade_delete.py
@@ -0,0 +1,93 @@
+"""Adding Cascade Delete
+
+Revision ID: 36703628c7a6
+Revises: 9b10292a95eb
+Create Date: 2025-03-31 17:15:08.654501
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
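+
+# Alembic has no ALTER CONSTRAINT operation, so ON DELETE CASCADE is added by
+# dropping and recreating each foreign key, as this revision does. The same
+# pattern, factored out as a sketch (illustrative only; the revision below
+# spells each constraint out explicitly):
+def _recreate_fk_with_cascade(name, table, referent, local_cols, remote_cols):
+    from alembic import op
+
+    op.drop_constraint(name, table, type_='foreignkey')
+    op.create_foreign_key(
+        name, table, referent, local_cols, remote_cols, ondelete='CASCADE'
+    )
+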
+revision: str = '36703628c7a6' +down_revision: Union[str, None] = '9b10292a95eb' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Drop existing foreign key constraints + op.drop_constraint( + 'role_resource_resource_id_fkey', 'role_resource', type_='foreignkey' + ) + op.drop_constraint( + 'role_resource_role_id_fkey', 'role_resource', type_='foreignkey' + ) + op.drop_constraint('user_role_role_id_fkey', 'user_role', type_='foreignkey') + op.drop_constraint('user_role_user_id_fkey', 'user_role', type_='foreignkey') + + # Create new foreign key constraints with CASCADE delete + op.create_foreign_key( + 'role_resource_resource_id_fkey', + 'role_resource', + 'resource', + ['resource_id'], + ['id'], + ondelete='CASCADE', + ) + op.create_foreign_key( + 'role_resource_role_id_fkey', + 'role_resource', + 'role', + ['role_id'], + ['id'], + ondelete='CASCADE', + ) + op.create_foreign_key( + 'user_role_role_id_fkey', + 'user_role', + 'role', + ['role_id'], + ['id'], + ondelete='CASCADE', + ) + op.create_foreign_key( + 'user_role_user_id_fkey', + 'user_role', + 'user', + ['user_id'], + ['id'], + ondelete='CASCADE', + ) + + +def downgrade() -> None: + # Drop CASCADE foreign key constraints + op.drop_constraint( + 'role_resource_resource_id_fkey', 'role_resource', type_='foreignkey' + ) + op.drop_constraint( + 'role_resource_role_id_fkey', 'role_resource', type_='foreignkey' + ) + op.drop_constraint('user_role_role_id_fkey', 'user_role', type_='foreignkey') + op.drop_constraint('user_role_user_id_fkey', 'user_role', type_='foreignkey') + + # Recreate original foreign key constraints without CASCADE + op.create_foreign_key( + 'role_resource_resource_id_fkey', + 'role_resource', + 'resource', + ['resource_id'], + ['id'], + ) + op.create_foreign_key( + 'role_resource_role_id_fkey', 'role_resource', 'role', ['role_id'], ['id'] + ) + op.create_foreign_key( + 'user_role_role_id_fkey', 'user_role', 'role', ['role_id'], ['id'] + ) + op.create_foreign_key( + 'user_role_user_id_fkey', 'user_role', 'user', ['user_id'], ['id'] + ) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_22_1448-ff32e2dd3106_created_actionable_insight_query_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_22_1448-ff32e2dd3106_created_actionable_insight_query_table.py new file mode 100644 index 00000000..5881338f --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_22_1448-ff32e2dd3106_created_actionable_insight_query_table.py @@ -0,0 +1,54 @@ +"""created actionable_insight_query table + +Revision ID: ff32e2dd3106 +Revises: 36703628c7a6 +Create Date: 2025-04-22 14:48:12.819342 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision: str = 'ff32e2dd3106' +down_revision: Union[str, None] = '36703628c7a6' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + 'actionable_insight_queries', + sa.Column('id', sa.String(length=255), nullable=False), + sa.Column('version', sa.Integer(), nullable=False), + sa.Column('type', sa.String(length=50), nullable=False), + sa.Column('title', sa.String(length=255), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('enabled', sa.Boolean(), nullable=False), + sa.Column( + 'periodicity', postgresql.JSONB(astext_type=sa.Text()), nullable=False + ), + sa.Column( + 'goal_lines', postgresql.JSONB(astext_type=sa.Text()), nullable=False + ), + sa.Column( + 'projections', postgresql.JSONB(astext_type=sa.Text()), nullable=False + ), + sa.Column('query', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('plots', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column( + 'created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True + ), + sa.Column( + 'updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True + ), + sa.PrimaryKeyConstraint('id'), + ) + + +def downgrade() -> None: + op.drop_table('actionable_insight_queries') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_23_1839-96b784074d1c_actionable_alerts_and_query_migrations.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_23_1839-96b784074d1c_actionable_alerts_and_query_migrations.py new file mode 100644 index 00000000..97b55b93 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_23_1839-96b784074d1c_actionable_alerts_and_query_migrations.py @@ -0,0 +1,79 @@ +"""actionable alerts and query migrations + +Revision ID: 96b784074d1c +Revises: ff32e2dd3106 +Create Date: 2025-04-23 18:39:07.626918 + +""" + +import json +import os +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import yaml + +# revision identifiers, used by Alembic. 
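+
+# The YAML loader in this revision indexes required keys directly, so a
+# malformed file fails with a bare KeyError. A fail-fast validation sketch
+# (illustrative only; REQUIRED_QUERY_KEYS is a hypothetical name, and this
+# helper is not called by the revision):
+REQUIRED_QUERY_KEYS = (
+    'id', 'version', 'type', 'title', 'name',
+    'periodicity', 'goal_lines', 'projections', 'query', 'plots',
+)
+
+
+def _validate_query_yaml(yaml_data, filename):
+    missing = [k for k in REQUIRED_QUERY_KEYS if k not in yaml_data]
+    if missing:
+        raise ValueError(f'{filename}: missing required keys {missing}')
+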
+revision: str = '96b784074d1c'
+down_revision: Union[str, None] = 'ff32e2dd3106'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    query_dir = os.environ.get('SIGNAL_QUERY_DIR', 'default')
+    base_dir = os.path.dirname(__file__)
+
+    directory = os.path.join(base_dir, '../../queries', query_dir)
+    directory = os.path.normpath(directory)
+
+    conn = op.get_bind()
+    for filename in os.listdir(directory):
+        if filename.endswith('.yaml') or filename.endswith('.yml'):
+            file_path = os.path.join(directory, filename)
+            with open(file_path, 'r') as f:
+                yaml_data = yaml.safe_load(f)
+            conn.execute(
+                sa.text(
+                    """
+                    INSERT INTO actionable_insight_queries (
+                        id, version, type, title, name, description, enabled,
+                        periodicity, goal_lines, projections, query, plots,
+                        created_at, updated_at
+                    )
+                    VALUES (
+                        :id, :version, :type, :title, :name, :description, :enabled,
+                        CAST(:periodicity AS jsonb), CAST(:goal_lines AS jsonb),
+                        CAST(:projections AS jsonb), CAST(:query AS jsonb),
+                        CAST(:plots AS jsonb), now(), now()
+                    )
+                    """
+                ),
+                {
+                    'id': yaml_data['id'],
+                    'version': yaml_data['version'],
+                    'type': yaml_data['type'],
+                    'title': yaml_data['title'],
+                    'name': yaml_data['name'],
+                    'description': yaml_data.get('description', ''),
+                    'enabled': yaml_data.get('enabled', True),
+                    'periodicity': json.dumps(yaml_data['periodicity']),
+                    'goal_lines': json.dumps(yaml_data['goal_lines']),
+                    'projections': json.dumps(yaml_data['projections']),
+                    'query': json.dumps(yaml_data['query']),
+                    'plots': json.dumps(yaml_data['plots']),
+                },
+            )
+    op.create_foreign_key(
+        'fk_insight_query',
+        'actionable_alerts',
+        'actionable_insight_queries',
+        ['signal_id'],
+        ['id'],
+    )
+
+
+def downgrade() -> None:
+    op.drop_constraint('fk_insight_query', 'actionable_alerts', type_='foreignkey')
+    op.execute('TRUNCATE TABLE actionable_insight_queries RESTART IDENTITY CASCADE')
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_24_1730-a0dfba41ef64_updated_knowledge_base_tables.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_24_1730-a0dfba41ef64_updated_knowledge_base_tables.py
new file mode 100644
index 00000000..567aaa2e
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_24_1730-a0dfba41ef64_updated_knowledge_base_tables.py
@@ -0,0 +1,99 @@
+"""Updated knowledge base tables
+
+Revision ID: a0dfba41ef64
+Revises: 96b784074d1c
+Create Date: 2025-04-24 17:30:04.147978
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import pgvector.sqlalchemy
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision: str = 'a0dfba41ef64'
+down_revision: Union[str, None] = '96b784074d1c'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    # Drop tables if they exist
+    op.drop_table('knowledge_base_embeddings', if_exists=True)
+    op.drop_table('knowledge_base_documents', if_exists=True)
+    op.drop_table('knowledge_bases', if_exists=True)
+
+    # Create tables
+    op.create_table(
+        'knowledge_bases',
+        sa.Column('id', sa.Uuid(), nullable=False),
+        sa.Column('name', sa.String(), nullable=False),
+        sa.Column('description', sa.String(), nullable=True),
+        sa.Column('type', sa.String(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), nullable=False),
+        sa.Column('updated_at', sa.DateTime(), nullable=False),
+        sa.PrimaryKeyConstraint('id'),
+        sa.UniqueConstraint('name'),
+    )
+    op.create_index(
+        op.f('ix_knowledge_bases_id'), 'knowledge_bases', ['id'], unique=False
+    )
+    op.create_table(
+        'knowledge_base_documents',
+        sa.Column('id', sa.Uuid(), nullable=False),
+        sa.Column('knowledge_base_id', sa.Uuid(), nullable=False),
+        sa.Column('file_path', sa.String(), nullable=False),
+        sa.Column('file_name', sa.String(), nullable=False),
+        sa.Column('file_type', sa.String(), nullable=False),
+        sa.Column('file_size', sa.Integer(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), nullable=False),
+        sa.Column('updated_at', sa.DateTime(), nullable=False),
+        sa.ForeignKeyConstraint(
+            ['knowledge_base_id'], ['knowledge_bases.id'], ondelete='CASCADE'
+        ),
+        sa.PrimaryKeyConstraint('id'),
+    )
+    op.create_index(
+        op.f('ix_knowledge_base_documents_id'),
+        'knowledge_base_documents',
+        ['id'],
+        unique=False,
+    )
+    op.create_table(
+        'knowledge_base_embeddings',
+        sa.Column('id', sa.UUID(), nullable=False),
+        sa.Column('document_id', sa.Uuid(), nullable=False),
+        sa.Column('embedding_vector', pgvector.sqlalchemy.Vector(), nullable=True),
+        sa.Column('chunk_text', sa.Text(), nullable=False),
+        sa.Column('chunk_index', sa.Integer(), nullable=False),
+        sa.Column('created_at', sa.DateTime(), nullable=False),
+        sa.ForeignKeyConstraint(
+            ['document_id'], ['knowledge_base_documents.id'], ondelete='CASCADE'
+        ),
+        sa.PrimaryKeyConstraint('id'),
+    )
+    op.create_index(
+        op.f('ix_knowledge_base_embeddings_id'),
+        'knowledge_base_embeddings',
+        ['id'],
+        unique=False,
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index(
+        op.f('ix_knowledge_base_embeddings_id'), table_name='knowledge_base_embeddings'
+    )
+    op.drop_table('knowledge_base_embeddings')
+    op.drop_index(
+        op.f('ix_knowledge_base_documents_id'), table_name='knowledge_base_documents'
+    )
+    op.drop_table('knowledge_base_documents')
+    op.drop_index(op.f('ix_knowledge_bases_id'), table_name='knowledge_bases')
+    op.drop_table('knowledge_bases')
+    # ### end Alembic commands ###
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_29_1345-053823285206_create_leads_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_29_1345-053823285206_create_leads_table.py
new file mode 100644
index 00000000..5201079b
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_29_1345-053823285206_create_leads_table.py
@@ -0,0 +1,48 @@
+"""create leads table
+
+Revision ID: 053823285206
+Revises: a0dfba41ef64
+Create Date: 2025-04-29 13:45:07.797358
+
+"""
+
+from typing import Sequence, Union
+import uuid
+
+from alembic import op
+from sqlalchemy import UUID
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
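+
+# The knowledge_base_embeddings table created in the previous revision stores
+# pgvector embeddings. A sketch of the read path it enables, using pgvector's
+# cosine-distance operator <=> (illustrative only; assumes the pgvector
+# extension is installed and query_vector is a list of floats):
+def _nearest_chunks(conn, query_vector, limit=5):
+    import sqlalchemy as sa
+
+    return conn.execute(
+        sa.text(
+            'SELECT chunk_text FROM knowledge_base_embeddings '
+            'ORDER BY embedding_vector <=> CAST(:q AS vector) LIMIT :n'
+        ),
+        {'q': '[' + ','.join(str(x) for x in query_vector) + ']', 'n': limit},
+    ).fetchall()
+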
+revision: str = '053823285206'
+down_revision: Union[str, None] = 'a0dfba41ef64'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    op.create_table(
+        'leads',
+        sa.Column('id', UUID(as_uuid=True), primary_key=True, default=uuid.uuid4),
+        sa.Column('product_category', sa.String, nullable=True),
+        sa.Column('conversation_id', sa.String, nullable=True),
+        sa.Column('customer_id', sa.String, nullable=True),
+        sa.Column('agent_id', sa.String, nullable=True),
+        sa.Column('branch', sa.String, nullable=True),
+        sa.Column('region', sa.String, nullable=True),
+        sa.Column('start', sa.Date(), nullable=False),
+        sa.Column('end', sa.Date(), nullable=False),
+        sa.Column(
+            'created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True
+        ),
+        sa.Column(
+            'updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True
+        ),
+        # additional columns for the leads table
+        sa.Column('product_name', sa.String, nullable=False),
+        sa.Column('type', sa.String, nullable=False),
+    )
+
+
+def downgrade() -> None:
+    op.drop_table('leads')
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_02_1239-d77dca43b31d_renaming_column_in_leads_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_02_1239-d77dca43b31d_renaming_column_in_leads_table.py
new file mode 100644
index 00000000..0d32cf5d
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_02_1239-d77dca43b31d_renaming_column_in_leads_table.py
@@ -0,0 +1,33 @@
+"""renaming column in leads table
+
+Revision ID: d77dca43b31d
+Revises: 053823285206
+Create Date: 2025-05-02 12:39:41.115042
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = 'd77dca43b31d'
+down_revision: Union[str, None] = '053823285206'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    op.alter_column('leads', 'start', new_column_name='start_date')
+    op.alter_column('leads', 'end', new_column_name='end_date')
+    op.execute(
+        "UPDATE notification SET type = 'warning' WHERE type = 'product_issues';"
+    )
+
+
+def downgrade() -> None:
+    op.alter_column('leads', 'start_date', new_column_name='start')
+    op.alter_column('leads', 'end_date', new_column_name='end')
+    op.execute(
+        "UPDATE notification SET type = 'product_issues' WHERE type = 'warning';"
+    )
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_02_1300-497a13558d60_created_the_kb_inference_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_02_1300-497a13558d60_created_the_kb_inference_table.py
new file mode 100644
index 00000000..5586f5be
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_02_1300-497a13558d60_created_the_kb_inference_table.py
@@ -0,0 +1,51 @@
+"""created the kb_inference table
+
+Revision ID: 497a13558d60
+Revises: d77dca43b31d
+Create Date: 2025-05-02 13:00:14.942014
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision: str = '497a13558d60' +down_revision: Union[str, None] = 'd77dca43b31d' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + 'knowledge_base_inferences', + sa.Column('inference_id', sa.Uuid(), nullable=False), + sa.Column('knowledge_base_id', sa.Uuid(), nullable=False), + sa.Column('inference_content', sa.JSON(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint( + ['knowledge_base_id'], ['knowledge_bases.id'], ondelete='CASCADE' + ), + sa.PrimaryKeyConstraint('inference_id'), + ) + op.create_index( + op.f('ix_knowledge_base_inferences_inference_id'), + 'knowledge_base_inferences', + ['inference_id'], + unique=False, + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index( + op.f('ix_knowledge_base_inferences_inference_id'), + table_name='knowledge_base_inferences', + ) + op.drop_table('knowledge_base_inferences') + # ### end Alembic commands ### diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_20_1427-80a6b1232d5e_updated_the_knowledge_base_embeddings.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_20_1427-80a6b1232d5e_updated_the_knowledge_base_embeddings.py new file mode 100644 index 00000000..d4324d12 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_20_1427-80a6b1232d5e_updated_the_knowledge_base_embeddings.py @@ -0,0 +1,27 @@ +"""Updated the knowledge_base_embeddings + +Revision ID: 80a6b1232d5e +Revises: 497a13558d60 +Create Date: 2025-05-20 14:27:52.192885 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision: str = '80a6b1232d5e' +down_revision: Union[str, None] = '497a13558d60' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.add_column('knowledge_base_embeddings', sa.Column('token', postgresql.TSVECTOR)) + + +def downgrade() -> None: + op.drop_column('knowledge_base_embeddings', 'token') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_24_1725-ba1f66ca0228_user_session.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_24_1725-ba1f66ca0228_user_session.py new file mode 100644 index 00000000..53b1995e --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_24_1725-ba1f66ca0228_user_session.py @@ -0,0 +1,48 @@ +"""user-session + +Revision ID: ba1f66ca0228 +Revises: 80a6b1232d5e +Create Date: 2025-05-24 17:25:58.041570 + +""" + +from typing import Sequence, Union +import uuid + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
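+
+# The token TSVECTOR column added in the previous revision enables lexical
+# full-text search alongside vector search. A sketch of that read path
+# (illustrative only; assumes token was populated with to_tsvector):
+def _keyword_search(conn, query, limit=10):
+    import sqlalchemy as sa
+
+    return conn.execute(
+        sa.text(
+            "SELECT chunk_text FROM knowledge_base_embeddings "
+            "WHERE token @@ plainto_tsquery('english', :q) "
+            "ORDER BY ts_rank(token, plainto_tsquery('english', :q)) DESC "
+            'LIMIT :n'
+        ),
+        {'q': query, 'n': limit},
+    ).fetchall()
+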
+revision: str = 'ba1f66ca0228' +down_revision: Union[str, None] = '80a6b1232d5e' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + 'user_session', + sa.Column('id', sa.UUID(), nullable=False, default=uuid.uuid4), + sa.Column('user_id', sa.UUID(), nullable=False), + sa.Column('device_info', sa.String(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False, default=sa.func.now()), + sa.Column( + 'updated_at', + sa.DateTime(), + nullable=False, + default=sa.func.now(), + onupdate=sa.func.now(), + ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index(op.f('ix_user_session_id'), 'user_session', ['id'], unique=False) + op.create_index( + op.f('ix_user_session_user_id'), 'user_session', ['user_id'], unique=False + ) + + +def downgrade() -> None: + op.drop_index(op.f('ix_user_session_user_id'), table_name='user_session') + op.drop_index(op.f('ix_user_session_id'), table_name='user_session') + op.drop_table('user_session') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_25_2103-0da695688814_cascade_rbac.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_25_2103-0da695688814_cascade_rbac.py new file mode 100644 index 00000000..fa5eef0d --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_05_25_2103-0da695688814_cascade_rbac.py @@ -0,0 +1,72 @@ +"""cascade-rbac + +Revision ID: 0da695688814 +Revises: ba1f66ca0228 +Create Date: 2025-05-25 21:03:49.706665 + +""" + +from typing import Sequence, Union + +from alembic import op + +# revision identifiers, used by Alembic. 
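+
+# Orphaned join-table rows must be removed before the CASCADE foreign keys in
+# this revision can be created. The cleanup, factored out as a generic sketch
+# (illustrative only; table and column names are interpolated, so this must
+# only ever be called with trusted, hard-coded identifiers):
+def _delete_orphans(conn, table, column, parent_table, parent_pk='id'):
+    import sqlalchemy as sa
+
+    conn.execute(
+        sa.text(
+            f'DELETE FROM {table} '
+            f'WHERE {column} NOT IN (SELECT {parent_pk} FROM "{parent_table}")'
+        )
+    )
+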
+revision: str = '0da695688814' +down_revision: Union[str, None] = 'ba1f66ca0228' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Clean up orphaned records + op.execute(""" + DELETE FROM user_role + WHERE user_id NOT IN (SELECT id FROM "user") + OR role_id NOT IN (SELECT id FROM role) + """) + + op.execute(""" + DELETE FROM role_resource + WHERE role_id NOT IN (SELECT id FROM role) + OR resource_id NOT IN (SELECT id FROM resource) + """) + + # Drop existing foreign key constraints + op.drop_constraint('user_role_user_id_fkey', 'user_role', type_='foreignkey') + op.drop_constraint( + 'role_resource_role_id_fkey', 'role_resource', type_='foreignkey' + ) + + # Add new foreign key constraints with CASCADE + op.create_foreign_key( + 'user_role_user_id_fkey', + 'user_role', + 'user', + ['user_id'], + ['id'], + ondelete='CASCADE', + ) + op.create_foreign_key( + 'role_resource_role_id_fkey', + 'role_resource', + 'role', + ['role_id'], + ['id'], + ondelete='CASCADE', + ) + + +def downgrade() -> None: + # Drop CASCADE foreign key constraints + op.drop_constraint('user_role_user_id_fkey', 'user_role', type_='foreignkey') + op.drop_constraint( + 'role_resource_role_id_fkey', 'role_resource', type_='foreignkey' + ) + + # Recreate original foreign key constraints without CASCADE + op.create_foreign_key( + 'user_role_user_id_fkey', 'user_role', 'user', ['user_id'], ['id'] + ) + op.create_foreign_key( + 'role_resource_role_id_fkey', 'role_resource', 'role', ['role_id'], ['id'] + ) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_06_11_1937-b365be32ca72_updated_the_knowledge_base_tables_for_.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_06_11_1937-b365be32ca72_updated_the_knowledge_base_tables_for_.py new file mode 100644 index 00000000..f8553c18 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_06_11_1937-b365be32ca72_updated_the_knowledge_base_tables_for_.py @@ -0,0 +1,32 @@ +"""Updated the knowledge base tables for vector_size + +Revision ID: b365be32ca72 +Revises: 0da695688814 +Create Date: 2025-06-11 19:37:24.072005 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision: str = 'b365be32ca72' +down_revision: Union[str, None] = '0da695688814' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + 'knowledge_bases', sa.Column('vector_size', sa.Integer(), nullable=True) + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column('knowledge_bases', 'vector_size') + # ### end Alembic commands ### diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_07_09_1936-827b9d399023_add_auth_secrets_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_07_09_1936-827b9d399023_add_auth_secrets_table.py new file mode 100644 index 00000000..06a212a2 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_07_09_1936-827b9d399023_add_auth_secrets_table.py @@ -0,0 +1,42 @@ +"""add auth_secrets_table + +Revision ID: 827b9d399023 +Revises: b365be32ca72 +Create Date: 2025-07-09 19:36:34.540004 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '827b9d399023' +down_revision: Union[str, None] = 'b365be32ca72' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + 'auth_secrets', + sa.Column('id', sa.UUID(), nullable=False), + sa.Column('client_key', sa.String(), nullable=False), + sa.Column('client_secret', sa.String(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('client_key'), + ) + op.create_index(op.f('ix_auth_secrets_id'), 'auth_secrets', ['id'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f('ix_auth_secrets_id'), table_name='auth_secrets') + op.drop_table('auth_secrets') + # ### end Alembic commands ### diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_07_10_1418-0db19a0af2af_added_datasource_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_07_10_1418-0db19a0af2af_added_datasource_table.py new file mode 100644 index 00000000..5f2f3b69 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_07_10_1418-0db19a0af2af_added_datasource_table.py @@ -0,0 +1,46 @@ +"""Added datasource table + +Revision ID: 0db19a0af2af +Revises: 827b9d399023 +Create Date: 2025-07-04 14:18:48.271013 + +""" + +import uuid +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
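+
+# The datasource table created below keeps provider-specific settings in a
+# JSONB config column. A seeding sketch for it (illustrative only; not used
+# by this revision):
+def _insert_datasource(conn, name, ds_type, config):
+    import json
+    import uuid
+
+    import sqlalchemy as sa
+
+    conn.execute(
+        sa.text(
+            'INSERT INTO datasource (id, name, type, config, created_at, updated_at) '
+            'VALUES (:id, :name, :type, CAST(:config AS jsonb), now(), now())'
+        ),
+        {'id': uuid.uuid4(), 'name': name, 'type': ds_type, 'config': json.dumps(config)},
+    )
+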
+revision: str = '0db19a0af2af' +down_revision: Union[str, None] = '827b9d399023' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + 'datasource', + sa.Column('id', sa.UUID(), nullable=False, default=uuid.uuid4), + sa.Column('name', sa.String(length=64), nullable=False), + sa.Column('description', sa.String(length=255), nullable=True), + sa.Column('type', sa.String(length=64), nullable=False), + sa.Column('config', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False, default=sa.func.now()), + sa.Column( + 'updated_at', + sa.DateTime(), + nullable=False, + default=sa.func.now(), + onupdate=sa.func.now(), + ), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index(op.f('ix_datasource_id'), 'datasource', ['id'], unique=False) + + +def downgrade() -> None: + op.drop_index(op.f('ix_datasource_id'), table_name='datasource') + op.drop_table('datasource') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_07_14_1400-a1b2c3d4e5f7_create_authenticators_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_07_14_1400-a1b2c3d4e5f7_create_authenticators_table.py new file mode 100644 index 00000000..820a520b --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_07_14_1400-a1b2c3d4e5f7_create_authenticators_table.py @@ -0,0 +1,95 @@ +"""Create authenticators table + +Revision ID: a1b2c3d4e5f7 +Revises: 0db19a0af2af +Create Date: 2025-07-14 14:00:00.000000 + +""" + +import json +import uuid +from typing import Sequence, Union +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision: str = 'a1b2c3d4e5f7'
+down_revision: Union[str, None] = '0db19a0af2af'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # Create authenticator table
+    op.create_table(
+        'authenticator',
+        sa.Column(
+            'auth_id',
+            postgresql.UUID(as_uuid=True),
+            nullable=False,
+            primary_key=True,
+            server_default=sa.text('gen_random_uuid()'),
+        ),
+        sa.Column('auth_name', sa.String(length=64), nullable=False),
+        sa.Column('auth_type', sa.String(), nullable=False),
+        sa.Column('auth_desc', sa.String(), nullable=True),
+        sa.Column('config', postgresql.JSONB(), nullable=False),
+        sa.Column('is_enabled', sa.Boolean(), nullable=False, server_default=sa.true()),
+        sa.Column('is_deleted', sa.Boolean(), nullable=False, server_default=sa.false()),
+        sa.Column(
+            'created_at', sa.DateTime(), nullable=False, server_default=sa.func.now()
+        ),
+        sa.Column(
+            'updated_at',
+            sa.DateTime(),
+            nullable=False,
+            server_default=sa.func.now(),
+            onupdate=sa.func.now(),
+        ),
+        sa.PrimaryKeyConstraint('auth_id'),
+        sa.UniqueConstraint('auth_name'),
+        sa.CheckConstraint("auth_name !~ '\\s'", name='auth_name_no_spaces'),
+    )
+
+    # Get database connection
+    conn = op.get_bind()
+
+    # Default email_password authenticator configuration
+    default_config = {
+        'password_policy': {
+            'min_length': 8,
+            'require_uppercase': True,
+            'require_lowercase': True,
+            'require_numbers': True,
+            'require_special_chars': False,
+            'max_attempts': 5,
+            'lockout_duration': 900,
+        },
+        'two_factor_enabled': False,
+        'password_reset_enabled': True,
+        'session_timeout': 3600,
+        'rate_limit_enabled': True,
+    }
+
+    # Insert default email_password authenticator using parameterized statement
+    conn.execute(
+        sa.text("""
+            INSERT INTO authenticator (auth_id, auth_name, auth_type, auth_desc, config, is_enabled, is_deleted)
+            VALUES (:auth_id, :auth_name, :auth_type, :auth_desc, :config, :is_enabled, :is_deleted)
+        """),
+        {
+            'auth_id': uuid.uuid4(),
+            'auth_name': 'email_password',
+            'auth_type': 'email_password',
+            'auth_desc': 'Traditional email and password authentication',
+            'config': json.dumps(default_config),
+            'is_enabled': True,
+            'is_deleted': False,
+        },
+    )
+
+
+def downgrade() -> None:
+    # Drop authenticator table
+    op.drop_table('authenticator')
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_11_1516-68ffaa4a3665_create_product_analysis_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_11_1516-68ffaa4a3665_create_product_analysis_table.py
new file mode 100644
index 00000000..bb29e404
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_11_1516-68ffaa4a3665_create_product_analysis_table.py
@@ -0,0 +1,47 @@
+"""create product analysis table
+
+Revision ID: 68ffaa4a3665
+Revises: a1b2c3d4e5f7
+Create Date: 2025-08-11 15:16:09.245567
+
+"""
+
+from typing import Sequence, Union
+import uuid
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+
+# revision identifiers, used by Alembic.
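+
+# The previous revision seeds a password_policy JSON block for the
+# email_password authenticator. A sketch of how an application might enforce
+# it (illustrative only; key names mirror that seeded config):
+def _check_password_policy(password, policy):
+    import re
+
+    checks = [
+        (len(password) >= policy['min_length'], 'too short'),
+        (not policy['require_uppercase'] or re.search(r'[A-Z]', password),
+         'needs an uppercase letter'),
+        (not policy['require_lowercase'] or re.search(r'[a-z]', password),
+         'needs a lowercase letter'),
+        (not policy['require_numbers'] or re.search(r'\d', password),
+         'needs a digit'),
+        (not policy['require_special_chars'] or re.search(r'[^A-Za-z0-9]', password),
+         'needs a special character'),
+    ]
+    failures = [msg for ok, msg in checks if not ok]
+    return not failures, failures
+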
+revision: str = '68ffaa4a3665'
+down_revision: Union[str, None] = 'a1b2c3d4e5f7'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    op.create_table(
+        'product_analytics',
+        sa.Column(
+            'event_id', sa.UUID(), primary_key=True, nullable=False, default=uuid.uuid4
+        ),
+        sa.Column('event_name', sa.String(length=255), nullable=False),
+        sa.Column('type', sa.String(length=255), nullable=True),
+        sa.Column('sub_type', sa.String(length=255), nullable=True),
+        sa.Column('category', sa.String(length=255), nullable=True),
+        sa.Column('sub_category', sa.String(length=255), nullable=True),
+        sa.Column('action', sa.String(length=255), nullable=True),
+        sa.Column('action_type', sa.String(length=255), nullable=True),
+        sa.Column('page', sa.String(length=255), nullable=False),
+        sa.Column('page_path', sa.String(), nullable=False),
+        sa.Column('metadata', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
+        sa.Column('user_id', sa.String(length=255), nullable=False),
+        sa.Column('session_id', sa.String(), nullable=False),
+        sa.Column('user_role', sa.String(length=255), nullable=False),
+        sa.Column('created_at', sa.DateTime(), nullable=False, default=sa.func.now()),
+    )
+
+
+def downgrade() -> None:
+    op.drop_table('product_analytics')
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_13_1309-1ef7d577ea53_add_account_lockout_fields_to_user_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_13_1309-1ef7d577ea53_add_account_lockout_fields_to_user_table.py
new file mode 100644
index 00000000..455630e5
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_13_1309-1ef7d577ea53_add_account_lockout_fields_to_user_table.py
@@ -0,0 +1,45 @@
+"""add account lockout fields to user table
+
+Revision ID: 1ef7d577ea53
+Revises: 68ffaa4a3665
+Create Date: 2025-08-13 13:09:27.095292
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = '1ef7d577ea53' +down_revision: Union[str, None] = '68ffaa4a3665' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Add account lockout fields to user table + # First add as nullable with server default + op.add_column( + 'user', + sa.Column('failed_attempts', sa.Integer(), nullable=True, server_default='0'), + ) + op.add_column('user', sa.Column('locked_until', sa.DateTime(), nullable=True)) + op.add_column( + 'user', sa.Column('last_failed_attempt', sa.DateTime(), nullable=True) + ) + + # Update existing records to have failed_attempts = 0 + op.execute('UPDATE "user" SET failed_attempts = 0 WHERE failed_attempts IS NULL') + + # Now make failed_attempts non-nullable with default value + op.alter_column('user', 'failed_attempts', nullable=False, server_default='0') + + +def downgrade() -> None: + # Remove account lockout fields from user table + op.drop_column('user', 'last_failed_attempt') + op.drop_column('user', 'locked_until') + op.drop_column('user', 'failed_attempts') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_13_1328-d5caffc321f2_create_config_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_13_1328-d5caffc321f2_create_config_table.py new file mode 100644 index 00000000..d0809d3e --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_13_1328-d5caffc321f2_create_config_table.py @@ -0,0 +1,39 @@ +"""create config table + +Revision ID: d5caffc321f2 +Revises: 1ef7d577ea53 +Create Date: 2025-08-18 12:28:31.653508 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision: str = 'd5caffc321f2' +down_revision: Union[str, None] = '1ef7d577ea53' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + 'config', + sa.Column('key', sa.String(255), primary_key=True), + sa.Column('value', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('created_at', sa.DateTime, nullable=False, default=sa.func.now()), + sa.Column( + 'updated_at', + sa.DateTime, + nullable=False, + default=sa.func.now(), + onupdate=sa.func.now(), + ), + ) + + +def downgrade() -> None: + op.drop_table('config') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_14_1455-1aaf2b1e6d56_add_last_login_at_to_user_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_14_1455-1aaf2b1e6d56_add_last_login_at_to_user_table.py new file mode 100644 index 00000000..a9332679 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_14_1455-1aaf2b1e6d56_add_last_login_at_to_user_table.py @@ -0,0 +1,29 @@ +"""add last_login_at to user table + +Revision ID: 1aaf2b1e6d56 +Revises: d5caffc321f2 +Create Date: 2025-08-14 14:55:39.364897 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
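+
+# The config table created in the previous revision is a key/value store with
+# `key` as primary key, which makes writes a natural PostgreSQL upsert. A
+# sketch (illustrative only; not used by this revision):
+def _upsert_config(conn, key, value):
+    import json
+
+    import sqlalchemy as sa
+
+    conn.execute(
+        sa.text(
+            'INSERT INTO config (key, value, created_at, updated_at) '
+            'VALUES (:k, CAST(:v AS jsonb), now(), now()) '
+            'ON CONFLICT (key) DO UPDATE '
+            'SET value = EXCLUDED.value, updated_at = now()'
+        ),
+        {'k': key, 'v': json.dumps(value)},
+    )
+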
+revision: str = '1aaf2b1e6d56' +down_revision: Union[str, None] = 'd5caffc321f2' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Add last_login_at field to user table + op.add_column('user', sa.Column('last_login_at', sa.DateTime(), nullable=True)) + + +def downgrade() -> None: + # Remove last_login_at field from user table + op.drop_column('user', 'last_login_at') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_24_0717-f1f6e5c42780_created_model_inference_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_24_0717-f1f6e5c42780_created_model_inference_table.py new file mode 100644 index 00000000..3a44eb41 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_08_24_0717-f1f6e5c42780_created_model_inference_table.py @@ -0,0 +1,46 @@ +"""created model_inference table + +Revision ID: f1f6e5c42780 +Revises: 1aaf2b1e6d56 +Create Date: 2025-08-24 07:17:31.968141 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision: str = 'f1f6e5c42780' +down_revision: Union[str, None] = '1aaf2b1e6d56' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + 'model_inference', + sa.Column('model_id', sa.Uuid(), nullable=False), + sa.Column('model_name', sa.String(), nullable=True), + sa.Column('model_path', sa.String(), nullable=True), + sa.Column('model_type', sa.String(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('model_id'), + sa.UniqueConstraint('model_name'), + ) + op.create_index( + op.f('ix_model_inference_model_id'), + 'model_inference', + ['model_id'], + unique=False, + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f('ix_model_inference_model_id'), table_name='model_inference') + op.drop_table('model_inference') + # ### end Alembic commands ### diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_09_01_1703-23db0be3a87a_create_llm_inference_config_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_09_01_1703-23db0be3a87a_create_llm_inference_config_table.py new file mode 100644 index 00000000..8bcb913b --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_09_01_1703-23db0be3a87a_create_llm_inference_config_table.py @@ -0,0 +1,49 @@ +"""create llm_inference_config table + +Revision ID: 23db0be3a87a +Revises: f1f6e5c42780 +Create Date: 2025-09-01 17:03:30.980177 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision: str = '23db0be3a87a' +down_revision: Union[str, None] = 'f1f6e5c42780' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + 'llm_inference_config', + sa.Column('id', sa.Uuid(), nullable=False), + sa.Column('llm_model', sa.String(length=255), nullable=False), + sa.Column('display_name', sa.String(length=255), nullable=False), + sa.Column('api_key', sa.String(length=512), nullable=True), + sa.Column('type', sa.String(length=64), nullable=False), + sa.Column('base_url', sa.String(length=512), nullable=True), + sa.Column('is_deleted', sa.Boolean(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.Column('updated_at', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index( + op.f('ix_llm_inference_config_id'), + 'llm_inference_config', + ['id'], + unique=False, + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f('ix_llm_inference_config_id'), table_name='llm_inference_config') + op.drop_table('llm_inference_config') + # ### end Alembic commands ### diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_09_03_1230-bb3907e50d30_create_config_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_09_03_1230-bb3907e50d30_create_config_table.py new file mode 100644 index 00000000..73f3603a --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_09_03_1230-bb3907e50d30_create_config_table.py @@ -0,0 +1,43 @@ +"""create config table + +Revision ID: bb3907e50d30 +Revises: 23db0be3a87a +Create Date: 2025-09-03 12:30:17.664871 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = 'bb3907e50d30' +down_revision: Union[str, None] = '23db0be3a87a' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + 'dynamic_query_yaml', + sa.Column('name', sa.String(255), primary_key=True), + sa.Column('datasource_id', sa.UUID(), nullable=True), # Changed to UUID + sa.Column('file_path', sa.String(255), nullable=False), + sa.Column('created_at', sa.DateTime, nullable=False, default=sa.func.now()), + sa.Column( + 'updated_at', + sa.DateTime, + nullable=False, + default=sa.func.now(), + onupdate=sa.func.now(), + ), + sa.ForeignKeyConstraint( + ['datasource_id'], ['datasource.id'], ondelete='CASCADE' + ), + ) + + +def downgrade() -> None: + op.drop_table('dynamic_query_yaml') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_09_16_1332-bf901c107c8d_create_image_search_tables.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_09_16_1332-bf901c107c8d_create_image_search_tables.py new file mode 100644 index 00000000..d02c91f7 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_09_16_1332-bf901c107c8d_create_image_search_tables.py @@ -0,0 +1,80 @@ +"""create_image_search_tables + +Revision ID: bf901c107c8d +Revises: bb3907e50d30 +Create Date: 2025-09-16 13:32:42.978978 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. 
+revision: str = 'bf901c107c8d' +down_revision: Union[str, None] = 'bb3907e50d30' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic ### + op.create_table( + 'image_knowledge_bases', + sa.Column('ikb_id', sa.String(length=255), nullable=False), + sa.Column('name', sa.String(length=100), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('ikb_type', sa.String(length=50), nullable=False), + sa.Column('algorithm_type', sa.String(length=50), nullable=False), + sa.Column('status', sa.String(length=20), nullable=True), + sa.Column('config', sa.JSON(), nullable=False), + sa.Column('image_count', sa.Integer(), nullable=True), + sa.Column('created_at', sa.DateTime(), nullable=True), + sa.Column('updated_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('ikb_id'), + ) + op.create_table( + 'reference_image_features', + sa.Column('reference_image_id', sa.String(length=255), nullable=False), + sa.Column('ikb_id', sa.String(length=255), nullable=True), + sa.Column('algorithm_type', sa.String(length=50), nullable=False), + sa.Column('image_url', sa.String(length=500), nullable=False), + sa.Column('image_metadata', sa.JSON(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint( + ['ikb_id'], + ['image_knowledge_bases.ikb_id'], + ), + sa.PrimaryKeyConstraint('reference_image_id'), + ) + op.create_table( + 'sift_features', + sa.Column('id', sa.UUID(), nullable=False), + sa.Column('reference_image_id', sa.String(length=255), nullable=False), + sa.Column('keypoint_id', sa.Integer(), nullable=False), + sa.Column('x', sa.Float(), nullable=False), + sa.Column('y', sa.Float(), nullable=False), + sa.Column('size', sa.Float(), nullable=False), + sa.Column('angle', sa.Float(), nullable=False), + sa.Column('response', sa.Float(), nullable=False), + sa.Column('octave', sa.Integer(), nullable=False), + sa.Column('class_id', sa.Integer(), nullable=False), + sa.Column('descriptor', sa.JSON(), nullable=False), + sa.Column('created_at', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint( + ['reference_image_id'], + ['reference_image_features.reference_image_id'], + ondelete='CASCADE', + ), + sa.PrimaryKeyConstraint('id'), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic + op.drop_table('sift_features') + op.drop_table('reference_image_features') + op.drop_table('image_knowledge_bases') + # ### end Alembic commands ### diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_10_21_1535-d54e5612306e_workflow_tables.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_10_21_1535-d54e5612306e_workflow_tables.py new file mode 100644 index 00000000..518a9585 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_10_21_1535-d54e5612306e_workflow_tables.py @@ -0,0 +1,69 @@ +"""Workflow tables + +Revision ID: d54e5612306e +Revises: bf901c107c8d +Create Date: 2025-10-21 15:35:17.038431 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
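+
+# workflow_runs below tracks each execution of a workflow_pipeline. A sketch
+# of the terminal status update a runner would issue against it (illustrative
+# only; not part of this revision):
+def _finish_run(conn, run_id, status, error=None):
+    import sqlalchemy as sa
+
+    conn.execute(
+        sa.text(
+            'UPDATE workflow_runs SET status = :s, error = :e, '
+            'end_time = now(), updated_at = now() WHERE id = :id'
+        ),
+        {'s': status, 'e': error, 'id': run_id},
+    )
+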
+revision: str = 'd54e5612306e'
+down_revision: Union[str, None] = 'bf901c107c8d'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # Create workflow_pipeline table
+    op.create_table(
+        'workflow_pipeline',
+        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column('name', sa.String(), nullable=False),
+        sa.Column('description', sa.String(), nullable=True),
+        sa.Column('location', sa.String(), nullable=False),
+        sa.Column('retry_policy', sa.String(), nullable=True),
+        sa.Column('timeout', sa.Integer(), nullable=True),
+        sa.Column('concurrency_limit', sa.Integer(), nullable=True, default=1),
+        sa.Column('created_at', sa.DateTime(), nullable=True),
+        sa.Column('updated_at', sa.DateTime(), nullable=True),
+        sa.PrimaryKeyConstraint('id'),
+    )
+    op.create_index(
+        op.f('ix_workflow_pipeline_id'), 'workflow_pipeline', ['id'], unique=False
+    )
+
+    # Create workflow_runs table
+    op.create_table(
+        'workflow_runs',
+        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
+        sa.Column(
+            'workflow_pipeline_id', postgresql.UUID(as_uuid=True), nullable=False
+        ),
+        sa.Column('status', sa.String(), nullable=False),
+        sa.Column('start_time', sa.DateTime(), nullable=False),
+        sa.Column('end_time', sa.DateTime(), nullable=True),
+        sa.Column('error', sa.String(), nullable=True),
+        sa.Column('output', sa.String(), nullable=True),
+        sa.Column('created_at', sa.DateTime(), nullable=True),
+        sa.Column('updated_at', sa.DateTime(), nullable=True),
+        sa.ForeignKeyConstraint(
+            ['workflow_pipeline_id'], ['workflow_pipeline.id'], ondelete='CASCADE'
+        ),
+        sa.PrimaryKeyConstraint('id'),
+    )
+    op.create_index(op.f('ix_workflow_runs_id'), 'workflow_runs', ['id'], unique=False)
+
+
+def downgrade() -> None:
+    # Drop tables in reverse order (workflow_runs first due to foreign key constraint)
+    op.drop_index(op.f('ix_workflow_runs_id'), table_name='workflow_runs')
+    op.drop_table('workflow_runs')
+
+    op.drop_index(op.f('ix_workflow_pipeline_id'), table_name='workflow_pipeline')
+    op.drop_table('workflow_pipeline')
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_10_24_0713-6742f38ca303_add_new_columns_in_knowledge_base_tables.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_10_24_0713-6742f38ca303_add_new_columns_in_knowledge_base_tables.py
new file mode 100644
index 00000000..b0337a27
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_10_24_0713-6742f38ca303_add_new_columns_in_knowledge_base_tables.py
@@ -0,0 +1,36 @@
+"""add new columns in knowledge base tables
+
+Revision ID: 6742f38ca303
+Revises: d54e5612306e
+Create Date: 2025-10-24 07:13:22.702999
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+import pgvector.sqlalchemy
+
+# revision identifiers, used by Alembic.
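For the workflow tables created above: each execution becomes a `workflow_runs` row tied to its `workflow_pipeline` parent, and `ON DELETE CASCADE` removes runs with their pipeline. A hedged sketch of how rows might be written with raw SQL (engine URL, names, and values are invented):

```python
# Illustrative only: how workflow_pipeline and workflow_runs relate.
# Table and column names come from the migration above; values are made up.
import uuid
from datetime import datetime

import sqlalchemy as sa

engine = sa.create_engine('postgresql+psycopg://user:pass@localhost/wavefront')  # placeholder

pipeline_id = uuid.uuid4()
with engine.begin() as conn:
    conn.execute(
        sa.text(
            'INSERT INTO workflow_pipeline (id, name, location) '
            'VALUES (:id, :name, :location)'
        ),
        {'id': pipeline_id, 'name': 'nightly-ingest', 'location': 's3://bucket/dag.py'},
    )
    # Deleting the pipeline later would cascade to this run.
    conn.execute(
        sa.text(
            'INSERT INTO workflow_runs (id, workflow_pipeline_id, status, start_time) '
            'VALUES (:id, :pid, :status, :start)'
        ),
        {
            'id': uuid.uuid4(),
            'pid': pipeline_id,
            'status': 'running',
            'start': datetime.now(),
        },
    )
```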
+revision: str = '6742f38ca303' +down_revision: Union[str, None] = 'd54e5612306e' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.add_column( + 'knowledge_base_embeddings', + sa.Column( + 'embedding_vector_1', pgvector.sqlalchemy.vector.VECTOR(), nullable=True + ), + ) + op.add_column( + 'knowledge_bases', sa.Column('vector_size_1', sa.Integer(), nullable=True) + ) + + +def downgrade() -> None: + op.drop_column('knowledge_bases', 'vector_size_1') + op.drop_column('knowledge_base_embeddings', 'embedding_vector_1') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_10_29_1346-22ec0134dcf8_create_voice_agents_tables.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_10_29_1346-22ec0134dcf8_create_voice_agents_tables.py new file mode 100644 index 00000000..b0d19ccf --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_10_29_1346-22ec0134dcf8_create_voice_agents_tables.py @@ -0,0 +1,146 @@ +"""create_voice_agents_tables + +Revision ID: 22ec0134dcf8 +Revises: 6742f38ca303 +Create Date: 2025-10-29 13:46:33.854725 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + + +# revision identifiers, used by Alembic. +revision: str = '22ec0134dcf8' +down_revision: Union[str, None] = '6742f38ca303' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Create telephony_configs table + op.create_table( + 'telephony_configs', + sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('provider', sa.String(length=64), nullable=False), + sa.Column('connection_type', sa.String(length=64), nullable=False), + sa.Column('credentials', sa.Text(), nullable=False), + sa.Column('phone_numbers', sa.Text(), nullable=False), + sa.Column('webhook_config', sa.Text(), nullable=True), + sa.Column('sip_config', sa.Text(), nullable=True), + sa.Column('is_deleted', sa.Boolean(), nullable=False, server_default='false'), + sa.Column( + 'created_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.Column( + 'updated_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index( + op.f('ix_telephony_configs_id'), 'telephony_configs', ['id'], unique=False + ) + op.create_index( + op.f('ix_telephony_configs_provider'), + 'telephony_configs', + ['provider'], + unique=False, + ) + + # Create tts_configs table + op.create_table( + 'tts_configs', + sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('provider', sa.String(length=64), nullable=False), + sa.Column('voice_id', sa.String(length=255), nullable=False), + sa.Column('api_key', sa.String(length=512), nullable=False), + sa.Column('language', sa.String(length=64), nullable=True), + sa.Column('parameters', sa.Text(), nullable=True), + sa.Column('is_deleted', sa.Boolean(), nullable=False, server_default='false'), + sa.Column( + 'created_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.Column( + 'updated_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index(op.f('ix_tts_configs_id'), 'tts_configs', ['id'], unique=False) + op.create_index( + op.f('ix_tts_configs_provider'), 
'tts_configs', ['provider'], unique=False + ) + + # Create stt_configs table + op.create_table( + 'stt_configs', + sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('provider', sa.String(length=64), nullable=False), + sa.Column('api_key', sa.String(length=512), nullable=False), + sa.Column('language', sa.String(length=64), nullable=True), + sa.Column('parameters', sa.Text(), nullable=True), + sa.Column('is_deleted', sa.Boolean(), nullable=False, server_default='false'), + sa.Column( + 'created_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.Column( + 'updated_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index(op.f('ix_stt_configs_id'), 'stt_configs', ['id'], unique=False) + op.create_index( + op.f('ix_stt_configs_provider'), 'stt_configs', ['provider'], unique=False + ) + + # Create voice_agents table (with foreign keys to the above tables) + op.create_table( + 'voice_agents', + sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('llm_config_id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('tts_config_id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('stt_config_id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('telephony_config_id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('system_prompt', sa.Text(), nullable=False), + sa.Column('conversation_config', sa.Text(), nullable=True), + sa.Column('status', sa.String(length=64), nullable=False), + sa.Column('is_deleted', sa.Boolean(), nullable=False, server_default='false'), + sa.Column( + 'created_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.Column( + 'updated_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.ForeignKeyConstraint(['llm_config_id'], ['llm_inference_config.id']), + sa.ForeignKeyConstraint(['tts_config_id'], ['tts_configs.id']), + sa.ForeignKeyConstraint(['stt_config_id'], ['stt_configs.id']), + sa.ForeignKeyConstraint(['telephony_config_id'], ['telephony_configs.id']), + sa.PrimaryKeyConstraint('id'), + ) + op.create_index(op.f('ix_voice_agents_id'), 'voice_agents', ['id'], unique=False) + op.create_index( + op.f('ix_voice_agents_status'), 'voice_agents', ['status'], unique=False + ) + + +def downgrade() -> None: + # Drop tables in reverse order (voice_agents first due to foreign key constraints) + op.drop_index(op.f('ix_voice_agents_status'), table_name='voice_agents') + op.drop_index(op.f('ix_voice_agents_id'), table_name='voice_agents') + op.drop_table('voice_agents') + + op.drop_index(op.f('ix_stt_configs_provider'), table_name='stt_configs') + op.drop_index(op.f('ix_stt_configs_id'), table_name='stt_configs') + op.drop_table('stt_configs') + + op.drop_index(op.f('ix_tts_configs_provider'), table_name='tts_configs') + op.drop_index(op.f('ix_tts_configs_id'), table_name='tts_configs') + op.drop_table('tts_configs') + + op.drop_index(op.f('ix_telephony_configs_provider'), table_name='telephony_configs') + op.drop_index(op.f('ix_telephony_configs_id'), table_name='telephony_configs') + op.drop_table('telephony_configs') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_03_1437-9bd8b7884ab0_update_the_metadata_column_in_knowledge_.py 
b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_03_1437-9bd8b7884ab0_update_the_metadata_column_in_knowledge_.py
new file mode 100644
index 00000000..7fa31ead
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_03_1437-9bd8b7884ab0_update_the_metadata_column_in_knowledge_.py
@@ -0,0 +1,29 @@
+"""Update the metadata column in knowledge base documents table
+
+Revision ID: 9bd8b7884ab0
+Revises: 22ec0134dcf8
+Create Date: 2025-11-03 14:37:38.268823
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision: str = '9bd8b7884ab0'
+down_revision: Union[str, None] = '22ec0134dcf8'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    op.add_column(
+        'knowledge_base_documents',
+        sa.Column('metadata_value', sa.JSON(), nullable=True),
+    )
+
+
+def downgrade() -> None:
+    op.drop_column('knowledge_base_documents', 'metadata_value')
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_04_1544-584f653169fd_add_parameters_to_llm_inference_config.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_04_1544-584f653169fd_add_parameters_to_llm_inference_config.py
new file mode 100644
index 00000000..038eef49
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_04_1544-584f653169fd_add_parameters_to_llm_inference_config.py
@@ -0,0 +1,91 @@
+"""voice_agents_and_llm_config_updates
+
+This migration adds several enhancements to voice agent configuration and LLM inference:
+1. Adds 'parameters' JSON column to llm_inference_config for flexible LLM parameters
+2. Adds 'welcome_message' text column to voice_agents for storing greeting messages
+3. Adds 'display_name' and 'description' fields to TTS, STT, and telephony configs
+
+Revision ID: 584f653169fd
+Revises: 9bd8b7884ab0
+Create Date: 2025-11-04 15:44:13.442528
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
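The `metadata_value` column added above is plain `sa.JSON`, so Postgres stores it as `json` and per-key filtering goes through the `->>` operator. A small sketch (the `source` key, URL, and values are invented for illustration):

```python
# Sketch: filtering knowledge_base_documents on the new JSON column.
# The 'source' key and engine URL are assumptions, not part of this patch.
import sqlalchemy as sa

engine = sa.create_engine('postgresql+psycopg://user:pass@localhost/wavefront')

with engine.connect() as conn:
    rows = conn.execute(
        sa.text(
            "SELECT id, file_name FROM knowledge_base_documents "
            "WHERE metadata_value->>'source' = :src"
        ),
        {'src': 'upload'},
    ).all()
    for row in rows:
        print(row.id, row.file_name)
```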
+revision: str = '584f653169fd' +down_revision: Union[str, None] = '9bd8b7884ab0' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Add parameters column to llm_inference_config + op.add_column( + 'llm_inference_config', sa.Column('parameters', sa.JSON(), nullable=True) + ) + + # Add welcome_message column to voice_agents + op.add_column( + 'voice_agents', + sa.Column('welcome_message', sa.Text(), nullable=False, server_default=''), + ) + + # Add display_name and description to tts_configs + op.add_column( + 'tts_configs', + sa.Column( + 'display_name', sa.String(length=100), nullable=False, server_default='' + ), + ) + op.add_column( + 'tts_configs', sa.Column('description', sa.String(length=500), nullable=True) + ) + + # Add display_name and description to stt_configs + op.add_column( + 'stt_configs', + sa.Column( + 'display_name', sa.String(length=100), nullable=False, server_default='' + ), + ) + op.add_column( + 'stt_configs', sa.Column('description', sa.String(length=500), nullable=True) + ) + + # Add display_name and description to telephony_configs + op.add_column( + 'telephony_configs', + sa.Column( + 'display_name', sa.String(length=100), nullable=False, server_default='' + ), + ) + op.add_column( + 'telephony_configs', + sa.Column('description', sa.String(length=500), nullable=True), + ) + + +def downgrade() -> None: + # Remove display_name and description from telephony_configs + op.drop_column('telephony_configs', 'description') + op.drop_column('telephony_configs', 'display_name') + + # Remove display_name and description from stt_configs + op.drop_column('stt_configs', 'description') + op.drop_column('stt_configs', 'display_name') + + # Remove display_name and description from tts_configs + op.drop_column('tts_configs', 'description') + op.drop_column('tts_configs', 'display_name') + + # Remove welcome_message from voice_agents + op.drop_column('voice_agents', 'welcome_message') + + # Remove parameters from llm_inference_config + op.drop_column('llm_inference_config', 'parameters') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_13_1654-ed9fca299c85_create_agents_and_namespaces_tables.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_13_1654-ed9fca299c85_create_agents_and_namespaces_tables.py new file mode 100644 index 00000000..56a4ee44 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_13_1654-ed9fca299c85_create_agents_and_namespaces_tables.py @@ -0,0 +1,125 @@ +"""create_agents_namespaces_and_workflows_tables + +Revision ID: ed9fca299c85 +Revises: 584f653169fd +Create Date: 2025-11-13 16:54:05.535954 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + + +# revision identifiers, used by Alembic. 
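Declaring the new NOT NULL columns with `server_default=''`, as done above, lets Postgres backfill existing rows at DDL time, so the migration succeeds even on populated tables. If the empty-string default is only meant as a backfill aid, a follow-up revision could drop it; a hypothetical sketch of that pattern (not part of this patch):

```python
# Hypothetical follow-up migration (not in this patch): keep the column
# NOT NULL but remove the DDL-level '' default once rows are backfilled.
from alembic import op
import sqlalchemy as sa


def upgrade() -> None:
    op.alter_column(
        'voice_agents',
        'welcome_message',
        existing_type=sa.Text(),
        server_default=None,  # emits ALTER COLUMN ... DROP DEFAULT
        existing_nullable=False,
    )
```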
+revision: str = 'ed9fca299c85' +down_revision: Union[str, None] = '584f653169fd' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Create namespaces table first (parent) + op.create_table( + 'namespaces', + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column( + 'created_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.Column( + 'updated_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.PrimaryKeyConstraint('name'), + ) + op.create_index(op.f('ix_namespaces_name'), 'namespaces', ['name'], unique=True) + + # Insert default namespace + op.execute("INSERT INTO namespaces (name) VALUES ('default')") + + # Create agents table (child with FK to namespaces) + op.create_table( + 'agents', + sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('namespace', sa.String(length=255), nullable=False), + sa.Column( + 'created_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.Column( + 'updated_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.ForeignKeyConstraint(['namespace'], ['namespaces.name']), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name', 'namespace', name='uq_agents_name_namespace'), + ) + op.create_index(op.f('ix_agents_id'), 'agents', ['id'], unique=False) + op.create_index(op.f('ix_agents_namespace'), 'agents', ['namespace'], unique=False) + + # Create workflows table (child with FK to namespaces) + op.create_table( + 'workflows', + sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('namespace', sa.String(length=255), nullable=False), + sa.Column( + 'created_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.Column( + 'updated_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.ForeignKeyConstraint(['namespace'], ['namespaces.name']), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name', 'namespace', name='uq_workflows_name_namespace'), + ) + op.create_index(op.f('ix_workflows_id'), 'workflows', ['id'], unique=False) + op.create_index( + op.f('ix_workflows_namespace'), 'workflows', ['namespace'], unique=False + ) + + # Update workflow_pipeline table: replace location with workflow_id FK + op.drop_column('workflow_pipeline', 'location') + op.add_column( + 'workflow_pipeline', + sa.Column('workflow_id', postgresql.UUID(as_uuid=True), nullable=False), + ) + op.create_foreign_key( + 'fk_workflow_pipeline_workflow_id', + 'workflow_pipeline', + 'workflows', + ['workflow_id'], + ['id'], + ) + op.create_index( + op.f('ix_workflow_pipeline_workflow_id'), + 'workflow_pipeline', + ['workflow_id'], + unique=False, + ) + + +def downgrade() -> None: + # Revert workflow_pipeline table changes + op.drop_index( + op.f('ix_workflow_pipeline_workflow_id'), table_name='workflow_pipeline' + ) + op.drop_constraint( + 'fk_workflow_pipeline_workflow_id', 'workflow_pipeline', type_='foreignkey' + ) + op.drop_column('workflow_pipeline', 'workflow_id') + op.add_column( + 'workflow_pipeline', sa.Column('location', sa.String(), nullable=False) + ) + + # Drop in reverse order (children first, then parent) + op.drop_index(op.f('ix_workflows_namespace'), table_name='workflows') + op.drop_index(op.f('ix_workflows_id'), table_name='workflows') + 
op.drop_table('workflows') + + op.drop_index(op.f('ix_agents_namespace'), table_name='agents') + op.drop_index(op.f('ix_agents_id'), table_name='agents') + op.drop_table('agents') + + op.drop_index(op.f('ix_namespaces_name'), table_name='namespaces') + op.drop_table('namespaces') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_19_1515-af9dfcda24fb_add_message_processor_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_19_1515-af9dfcda24fb_add_message_processor_table.py new file mode 100644 index 00000000..82d8942f --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_19_1515-af9dfcda24fb_add_message_processor_table.py @@ -0,0 +1,50 @@ +"""Add message processor table + +Revision ID: af9dfcda24fb +Revises: ed9fca299c85 +Create Date: 2025-11-19 15:15:34.241072 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql +import uuid + +# revision identifiers, used by Alembic. +revision: str = 'af9dfcda24fb' +down_revision: Union[str, None] = 'ed9fca299c85' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Create message_processors table + op.create_table( + 'message_processors', + sa.Column( + 'id', postgresql.UUID(as_uuid=True), nullable=False, default=uuid.uuid4 + ), + sa.Column('name', sa.String(length=64), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('source', sa.String(length=512), nullable=False), + sa.Column( + 'created_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.Column( + 'updated_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name'), + sa.UniqueConstraint('source'), + ) + op.create_index( + op.f('ix_message_processors_id'), 'message_processors', ['id'], unique=False + ) + + +def downgrade() -> None: + op.drop_index(op.f('ix_message_processors_id'), table_name='message_processors') + op.drop_table('message_processors') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_23_1015-a9a5d624020c_api_service.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_23_1015-a9a5d624020c_api_service.py new file mode 100644 index 00000000..a1c17ae2 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_23_1015-a9a5d624020c_api_service.py @@ -0,0 +1,45 @@ +"""api_service + +Revision ID: a9a5d624020c +Revises: af9dfcda24fb +Create Date: 2025-11-23 10:15:22.245337 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
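In the `message_processors` table above, both `name` and `source` carry UNIQUE constraints, so re-registering a processor fails in the database rather than in application code. Note also that the migration's `default=uuid.uuid4` is a client-side SQLAlchemy default, not a DDL `DEFAULT`, so raw SQL inserts must supply `id` themselves. A sketch with placeholder values:

```python
# Sketch: the UNIQUE constraints surface as IntegrityError on duplicates.
# Engine URL and row values are placeholders.
import uuid

import sqlalchemy as sa
from sqlalchemy.exc import IntegrityError

engine = sa.create_engine('postgresql+psycopg://user:pass@localhost/wavefront')

stmt = sa.text(
    'INSERT INTO message_processors (id, name, source) '
    'VALUES (:id, :name, :source)'
)
params = {'id': uuid.uuid4(), 'name': 'pii-scrubber', 'source': 'processors/pii.py'}

with engine.begin() as conn:
    conn.execute(stmt, params)

try:
    with engine.begin() as conn:
        # Fresh id, but the same name/source pair.
        conn.execute(stmt, {**params, 'id': uuid.uuid4()})
except IntegrityError:
    print('duplicate processor rejected by the unique constraints')
```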
+revision: str = 'a9a5d624020c' +down_revision: Union[str, None] = 'af9dfcda24fb' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + 'api_services', + sa.Column('id', sa.String(length=255), nullable=False), + sa.Column('service_def_path', sa.String(length=255), nullable=False), + sa.Column( + 'is_active', + sa.Boolean(), + nullable=False, + default=True, + server_default=sa.text('true'), + ), + sa.Column( + 'created_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.Column( + 'updated_at', sa.DateTime(), nullable=False, server_default=sa.text('now()') + ), + sa.PrimaryKeyConstraint('id'), + ) + + +def downgrade() -> None: + op.drop_table('api_services') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_26_0656-ca83b60258d6_added_config_id_column_in_inference_.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_26_0656-ca83b60258d6_added_config_id_column_in_inference_.py new file mode 100644 index 00000000..55a5c762 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_11_26_0656-ca83b60258d6_added_config_id_column_in_inference_.py @@ -0,0 +1,42 @@ +"""Added config_id column in inference table + +Revision ID: ca83b60258d6 +Revises: a9a5d624020c +Create Date: 2025-11-26 06:56:44.540145 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision: str = 'ca83b60258d6' +down_revision: Union[str, None] = 'a9a5d624020c' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.add_column( + 'knowledge_base_inferences', + sa.Column('config_id', sa.Uuid(), nullable=True), + ) + op.create_foreign_key( + 'fk_kb_inferences_config_id', + 'knowledge_base_inferences', + 'llm_inference_config', + ['config_id'], + ['id'], + ondelete='CASCADE', + ) + + +def downgrade() -> None: + op.drop_constraint( + 'fk_kb_inferences_config_id', + 'knowledge_base_inferences', + type_='foreignkey', + ) + op.drop_column('knowledge_base_inferences', 'config_id') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_12_01_1619-10e09e25efa0_drop_actionable_alert_insight_leads_.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_12_01_1619-10e09e25efa0_drop_actionable_alert_insight_leads_.py new file mode 100644 index 00000000..a22b7ac0 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_12_01_1619-10e09e25efa0_drop_actionable_alert_insight_leads_.py @@ -0,0 +1,104 @@ +"""drop_actionable_alert_insight_leads_table + +Revision ID: 10e09e25efa0 +Revises: ca83b60258d6 +Create Date: 2025-12-01 16:19:58.228914 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
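`is_active` above is declared with both `default=True` (filled in by SQLAlchemy when it builds the INSERT) and `server_default=sa.text('true')` (a real DDL `DEFAULT`), so the flag is populated whether rows arrive via the ORM or via raw SQL. A compact illustration, with a placeholder URL and row:

```python
# Sketch: the DDL-level DEFAULT covers raw SQL inserts that omit is_active.
# Engine URL and values are placeholders.
import sqlalchemy as sa

engine = sa.create_engine('postgresql+psycopg://user:pass@localhost/wavefront')

with engine.begin() as conn:
    conn.execute(
        sa.text(
            "INSERT INTO api_services (id, service_def_path) "
            "VALUES ('billing', 'services/billing.yaml')"
        )
    )
    active = conn.execute(
        sa.text("SELECT is_active FROM api_services WHERE id = 'billing'")
    ).scalar_one()
    print(active)  # True, filled in by the server default
```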
+revision: str = '10e09e25efa0' +down_revision: Union[str, None] = 'ca83b60258d6' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Drop the three tables + op.drop_table('actionable_alerts') + op.drop_table('actionable_insight_queries') + op.drop_table('leads') + + +def downgrade() -> None: + # Recreate leads table + op.create_table( + 'leads', + sa.Column('id', sa.UUID(), nullable=False), + sa.Column('product_category', sa.String(), nullable=True), + sa.Column('conversation_id', sa.String(), nullable=True), + sa.Column('customer_id', sa.String(), nullable=True), + sa.Column('agent_id', sa.String(), nullable=True), + sa.Column('branch', sa.String(), nullable=True), + sa.Column('region', sa.String(), nullable=True), + sa.Column('start_date', sa.Date(), nullable=False), + sa.Column('end_date', sa.Date(), nullable=False), + sa.Column( + 'created_at', sa.DateTime(), nullable=False, server_default=sa.func.now() + ), + sa.Column( + 'updated_at', + sa.DateTime(), + nullable=False, + server_default=sa.func.now(), + onupdate=sa.func.now(), + ), + sa.Column('type', sa.String(), nullable=False), + sa.Column('product_name', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id'), + ) + + # Recreate actionable_insight_queries table + op.create_table( + 'actionable_insight_queries', + sa.Column('id', sa.String(length=255), nullable=False), + sa.Column('version', sa.Integer(), nullable=False), + sa.Column('type', sa.String(length=50), nullable=False), + sa.Column('title', sa.String(length=255), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('enabled', sa.Boolean(), nullable=False, server_default='true'), + sa.Column('periodicity', sa.dialects.postgresql.JSONB(), nullable=False), + sa.Column('goal_lines', sa.dialects.postgresql.JSONB(), nullable=False), + sa.Column('projections', sa.dialects.postgresql.JSONB(), nullable=False), + sa.Column('query', sa.dialects.postgresql.JSONB(), nullable=False), + sa.Column('plots', sa.dialects.postgresql.JSONB(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()), + sa.Column( + 'updated_at', + sa.DateTime(), + server_default=sa.func.now(), + onupdate=sa.func.now(), + ), + sa.PrimaryKeyConstraint('id'), + ) + + # Recreate actionable_alerts table + op.create_table( + 'actionable_alerts', + sa.Column('id', sa.UUID(), nullable=False), + sa.Column('signal_id', sa.String(), nullable=False), + sa.Column('title', sa.String(), nullable=True), + sa.Column('description', sa.String(), nullable=True), + sa.Column('signal_type', sa.String(), nullable=False), + sa.Column('signal_name', sa.String(), nullable=True), + sa.Column('alerts', sa.dialects.postgresql.JSONB(), nullable=True), + sa.Column('data', sa.dialects.postgresql.JSONB(), nullable=True), + sa.Column( + 'created_at', sa.DateTime(), nullable=False, server_default=sa.func.now() + ), + sa.Column( + 'updated_at', + sa.DateTime(), + nullable=False, + server_default=sa.func.now(), + onupdate=sa.func.now(), + ), + sa.PrimaryKeyConstraint('id'), + ) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/cache/cache_manager.py b/wavefront/server/modules/db_repo_module/db_repo_module/cache/cache_manager.py new file mode 100644 index 00000000..14501e24 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/cache/cache_manager.py @@ -0,0 +1,248 @@ +import os +import time +from 
typing import Any, Callable, List, Optional, Union
+
+from common_module.common_cache import CommonCache
+from common_module.log.logger import logger
+from redis import ConnectionError
+from redis import ConnectionPool
+from redis import Redis
+from redis import RedisError
+from redis import TimeoutError
+from tenacity import retry
+from tenacity import retry_if_exception_type
+from tenacity import stop_after_attempt
+from tenacity import wait_exponential
+
+
+class CacheManager(CommonCache):
+    def __init__(
+        self,
+        namespace: str = '',
+        max_retries: int = 3,
+        initial_backoff: int = 1,
+        max_backoff: int = 10,
+        connection_timeout: int = 60,
+        socket_timeout: int = 60,
+        socket_keepalive: bool = True,
+        pool_size: int = 5,
+    ):
+        self.namespace = namespace
+        self.max_retries = max_retries
+        self.initial_backoff = initial_backoff
+        self.max_backoff = max_backoff
+
+        self.pool = self._create_connection_pool(
+            connection_timeout=connection_timeout,
+            socket_timeout=socket_timeout,
+            socket_keepalive=socket_keepalive,
+            pool_size=pool_size,
+        )
+
+        self.redis = self._create_redis_connection()
+
+        # Test the connection immediately - fail fast if Redis is unreachable
+        try:
+            self.redis.ping()
+            logger.info('Connected to Redis with redis ping')
+        except (ConnectionError, TimeoutError, RedisError) as e:
+            logger.error(f'Failed to connect to Redis during initialization: {e}')
+            logger.error('Server will not start without Redis connectivity')
+            raise RuntimeError(f'Redis connection test failed: {e}') from e
+
+    def _create_connection_pool(
+        self,
+        connection_timeout: int,
+        socket_timeout: int,
+        socket_keepalive: bool,
+        pool_size: int,
+    ) -> ConnectionPool:
+        try:
+            return ConnectionPool(
+                host=str(os.getenv('REDIS_HOST', 'localhost')),
+                port=int(os.getenv('REDIS_PORT', 6379)),
+                db=int(os.getenv('REDIS_DB', 0)),
+                max_connections=pool_size,
+                socket_timeout=socket_timeout,
+                socket_keepalive=socket_keepalive,
+                socket_connect_timeout=connection_timeout,
+                retry_on_timeout=True,
+                health_check_interval=30,
+                encoding='utf-8',
+                decode_responses=True,
+            )
+        except Exception as e:
+            logger.error(f'Failed to create connection pool: {e}')
+            raise
+
+    def _create_redis_connection(self) -> Redis:
+        logger.info('Creating Redis connection from pool...')
+        return Redis(connection_pool=self.pool)
+
+    def _checking_redis_connection(self):
+        try:
+            self.redis.ping()
+            return True
+        except (ConnectionError, TimeoutError) as e:
+            logger.warning(f'Redis connection lost: {e}. Attempting to reconnect...')
+            self.redis = self._create_redis_connection()
+            return False
+
+    @retry(
+        stop=stop_after_attempt(3),
+        wait=wait_exponential(multiplier=1, min=1, max=10),
+        retry=retry_if_exception_type((RedisError, ConnectionError, TimeoutError)),
+    )
+    def add(
+        self,
+        key: str,
+        value: Union[str, int, float, bytes],
+        expiry: int = 3600,
+        nx: bool = False,
+    ) -> bool:
+        try:
+            logger.info(f'Adding key: {key} to cache with expiry: {expiry} seconds')
+            return bool(
+                self.redis.set(f'{self.namespace}/{key}', value, ex=expiry, nx=nx)
+            )
+        except (RedisError, ConnectionError, TimeoutError) as e:
+            logger.error(f'Error adding key: {key} to cache: {e}')
+            raise
+
+    @retry(
+        stop=stop_after_attempt(3),
+        wait=wait_exponential(multiplier=1, min=1, max=10),
+        retry=retry_if_exception_type((RedisError, ConnectionError, TimeoutError)),
+    )
+    def get_str(self, key: str, default: Any = None) -> Optional[str]:
+        try:
+            value = self.redis.get(f'{self.namespace}/{key}')
+            return value if value is not None else default
+
+        except (RedisError, ConnectionError, TimeoutError) as e:
+            logger.error(f'Error getting key: {key} from cache: {e}')
+            raise
+
+    def get_int(self, key: str, default: int = 0) -> int:
+        value = self.get_str(key, default)
+        return int(value) if value is not None else default
+
+    def remove(self, key: str) -> bool:
+        try:
+            return bool(self.redis.delete(f'{self.namespace}/{key}'))
+        except (RedisError, ConnectionError, TimeoutError) as e:
+            logger.error(f'Error removing key: {key} from cache: {e}')
+            raise
+
+    def invalidate_query(self, pattern: str) -> int:
+        """Remove all keys matching the given pattern"""
+        try:
+            # Get all keys matching the pattern
+            search_pattern = f'{self.namespace}/{pattern}'
+            keys = self.redis.keys(search_pattern)
+            if keys:
+                logger.info(
+                    f'Invalidating {len(keys)} cache keys matching pattern: {pattern}'
+                )
+                return self.redis.delete(*keys)
+            logger.info(f'No cache keys found matching pattern: {pattern}')
+            return 0
+        except (RedisError, ConnectionError, TimeoutError) as e:
+            logger.error(f'Error removing keys with pattern: {pattern} from cache: {e}')
+            raise
+
+    @retry(
+        stop=stop_after_attempt(3),
+        wait=wait_exponential(multiplier=1, min=1, max=10),
+        retry=retry_if_exception_type((RedisError, ConnectionError, TimeoutError)),
+    )
+    def publish(self, channel: str, message: str) -> int:
+        """
+        Publish a message to a Redis channel.
+
+        Args:
+            channel: The channel name to publish to
+            message: The message to publish
+
+        Returns:
+            Number of subscribers that received the message
+
+        Raises:
+            RedisError: If publishing fails
+        """
+        try:
+            full_channel = f'{self.namespace}/{channel}'
+            logger.info(f'Publishing message to channel: {full_channel}')
+            return self.redis.publish(full_channel, message)
+        except (RedisError, ConnectionError, TimeoutError) as e:
+            logger.error(f'Error publishing to channel {channel}: {e}')
+            raise
+
+    def subscribe(
+        self, channels: Optional[List[str]] = None, patterns: Optional[List[str]] = None
+    ):
+        """
+        Subscribe to Redis channels or patterns.
+
+        Args:
+            channels: List of channel names to subscribe to
+            patterns: List of patterns to subscribe to (supports wildcards)
+
+        Returns:
+            PubSub object that can be used to listen for messages
+
+        Example:
+            # Subscribe to specific channels
+            pubsub = cache_manager.subscribe(channels=['updates', 'notifications'])
+
+            # Subscribe to patterns
+            pubsub = cache_manager.subscribe(patterns=['user:*', 'event:*'])
+
+            # Listen for messages
+            for message in pubsub.listen():
+                if message['type'] == 'message':
+                    print(f"Received: {message['data']}")
+        """
+        try:
+            pubsub = self.redis.pubsub()
+
+            if channels:
+                namespaced_channels = [f'{self.namespace}/{ch}' for ch in channels]
+                pubsub.subscribe(*namespaced_channels)
+                logger.info(f'Subscribed to channels: {namespaced_channels}')
+
+            if patterns:
+                namespaced_patterns = [f'{self.namespace}/{pat}' for pat in patterns]
+                pubsub.psubscribe(*namespaced_patterns)
+                logger.info(f'Subscribed to patterns: {namespaced_patterns}')
+
+            if not channels and not patterns:
+                logger.warning('No channels or patterns specified for subscription')
+
+            return pubsub
+        except (RedisError, ConnectionError, TimeoutError) as e:
+            logger.error(f'Error subscribing to channels/patterns: {e}')
+            raise
+
+    def close(self):
+        try:
+            self.pool.disconnect()
+            logger.info('Redis connection pool closed successfully')
+        except Exception as e:
+            logger.error(f'Error closing Redis connection pool: {e}')
+
+    def _retry_with_backoff(self, func: Callable, *args, **kwargs) -> Any:
+        retries = 0
+        while retries < self.max_retries:
+            try:
+                return func(*args, **kwargs)
+            except (RedisError, ConnectionError, TimeoutError) as e:
+                retries += 1
+                if retries >= self.max_retries:
+                    logger.error(f'Max retries reached for {func.__name__}: {e}')
+                    raise
+                backoff = min(
+                    self.initial_backoff * (2 ** (retries - 1)), self.max_backoff
+                )
+                logger.warning(f'Retrying {func.__name__} in {backoff} seconds...')
+                time.sleep(backoff)
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/database/base.py b/wavefront/server/modules/db_repo_module/db_repo_module/database/base.py
new file mode 100644
index 00000000..59be7030
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/database/base.py
@@ -0,0 +1,3 @@
+from sqlalchemy.orm import declarative_base
+
+Base = declarative_base()
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/database/connection.py b/wavefront/server/modules/db_repo_module/db_repo_module/database/connection.py
new file mode 100644
index 00000000..42b17e09
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/database/connection.py
@@ -0,0 +1,84 @@
+from dataclasses import dataclass
+from pathlib import Path
+
+from alembic import command
+from alembic.config import Config
+from sqlalchemy.ext.asyncio import async_sessionmaker
+from sqlalchemy.ext.asyncio import create_async_engine
+
+
+@dataclass
+class DatabaseConfig:
+    username: str
+    password: str
+    host: str
+    port: str
+    db_name: str
+
+
+class DatabaseClient:
+    def __init__(self, db_config: DatabaseConfig) -> None:
+        self.db_config = db_config
+        self._engine = create_async_engine(
+            f'postgresql+psycopg://{db_config.username}:{db_config.password}@{db_config.host}:{db_config.port}/{db_config.db_name}'
+        )
+        self.session = async_sessionmaker(autocommit=False, bind=self._engine)
+
+    async def close(self):
+        if self._engine is None:
+            raise Exception('DatabaseClient is not initialized')
+        await self._engine.dispose()
+
+        self._engine = None
+        self.session = None
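Looking back at `cache_manager.py` above: keys and channels are transparently prefixed with the namespace, and the tenacity decorators retry transient Redis failures with exponential backoff. A usage sketch, assuming `REDIS_HOST`/`REDIS_PORT` point at a reachable instance and that `common_module` is importable:

```python
# Usage sketch for the CacheManager defined above. Assumes a reachable
# Redis (REDIS_HOST/REDIS_PORT) and that common_module is on the path.
from db_repo_module.cache.cache_manager import CacheManager

cache = CacheManager(namespace='wavefront')

# Keys are stored as 'wavefront/<key>' and expire after `expiry` seconds.
cache.add('session:42', 'alive', expiry=60)
print(cache.get_str('session:42'))            # 'alive'
print(cache.get_int('hit_count', default=0))  # 0 until the key exists

# Pub/sub goes through the same namespace prefix.
pubsub = cache.subscribe(channels=['events'])
cache.publish('events', 'hello')
for message in pubsub.listen():
    if message['type'] == 'message':
        print(message['data'])  # 'hello' (str, since decode_responses=True)
        break

cache.remove('session:42')
cache.close()
```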
+ + async def connect(self) -> None: + if self._engine is None: + raise Exception('DatabaseClient is not initialized') + + async with self._engine.begin() as connection: + try: + await connection.exec_driver_sql( + 'CREATE EXTENSION IF NOT EXISTS vector;' + ) + except Exception: + await connection.rollback() + raise + + def _get_alembic_config_path(self) -> Path: + """Get the absolute path to the alembic.ini configuration file. + + Returns: + Path: Absolute path to the alembic.ini file + + Raises: + FileNotFoundError: If alembic.ini file is not found + """ + current_file = Path(__file__) + alembic_path = current_file.parent.parent / 'alembic.ini' + + if not alembic_path.exists(): + raise FileNotFoundError( + f'Alembic configuration file not found at: {alembic_path}' + ) + + return alembic_path + + def run_migration(self, target_revision: str = 'head') -> None: + """Run database migrations using Alembic. + + Args: + target_revision: The target revision to migrate to. Defaults to 'head'. + + Raises: + FileNotFoundError: If alembic.ini file is not found + Exception: If migration fails + """ + try: + alembic_config_path = self._get_alembic_config_path() + alembic_config = Config(str(alembic_config_path)) + command.upgrade(alembic_config, target_revision) + except FileNotFoundError as e: + raise FileNotFoundError(f'Migration failed: {e}') + except Exception as e: + raise Exception(f'Migration failed with error: {e}') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/db_repo_container.py b/wavefront/server/modules/db_repo_module/db_repo_module/db_repo_container.py new file mode 100644 index 00000000..05319c52 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/db_repo_container.py @@ -0,0 +1,223 @@ +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.database.connection import DatabaseClient +from db_repo_module.database.connection import DatabaseConfig +from db_repo_module.models.auth_secrets import AuthSecrets +from db_repo_module.models.datasource import Datasource +from db_repo_module.models.email import Email +from db_repo_module.models.kb_inferences import KnowledgeBaseInferences +from db_repo_module.models.knowledge_base_documents import KnowledgeBaseDocuments +from db_repo_module.models.knowledge_base_embeddings import KnowledgeBaseEmbeddings +from db_repo_module.models.knowledge_bases import KnowledgeBase +from db_repo_module.models.notification_users import NotificationUser +from db_repo_module.models.notifications import Notification +from db_repo_module.models.oauth_credential import OAuthCredential +from db_repo_module.models.resource import Resource +from db_repo_module.models.role_resource import RoleResource +from db_repo_module.models.task import Task +from db_repo_module.models.user import User +from db_repo_module.models.user_role import UserRole +from db_repo_module.models.product_analytics import ProductAnalytics +from db_repo_module.models.session import Session +from db_repo_module.models.config import Config +from db_repo_module.models.dynamic_query_yaml import DynamicQueryYaml +from db_repo_module.models.model_schema import ModelSchema +from db_repo_module.models.workflow_pipeline import WorkflowPipeline +from db_repo_module.models.workflow_runs import WorkflowRuns +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from db_repo_module.models.image_search_models import ( + ReferenceImageFeatures, + SIFTFeatures, +) +from db_repo_module.models.ikb_models import 
ImageKnowledgeBase +from db_repo_module.models.namespace import Namespace +from db_repo_module.models.agent import Agent +from db_repo_module.models.workflow import Workflow +from db_repo_module.models.api_services import ApiServices +from dependency_injector import containers +from dependency_injector import providers + + +class DatabaseModuleContainer(containers.DeclarativeContainer): + config = providers.Configuration(ini_files=['config.ini']) + + db_config = providers.Factory( + DatabaseConfig, + username=config.database.username, + password=config.database.password, + host=config.database.host, + port=config.database.port, + db_name=config.database.db_name, + ) + + db_client = providers.Singleton(DatabaseClient, db_config=db_config) + + email_repository = providers.Singleton( + SQLAlchemyRepository[Email], model=Email, db_client=db_client + ) + + oauth_credential_repository = providers.Singleton( + SQLAlchemyRepository[OAuthCredential], + model=OAuthCredential, + db_client=db_client, + ) + + user_repository = providers.Singleton( + SQLAlchemyRepository[User], model=User, db_client=db_client + ) + + task_repository = providers.Singleton( + SQLAlchemyRepository[Task], model=Task, db_client=db_client + ) + + notification_repository = providers.Singleton( + SQLAlchemyRepository[Notification], model=Notification, db_client=db_client + ) + notification_user_repository = providers.Singleton( + SQLAlchemyRepository[NotificationUser], + model=NotificationUser, + db_client=db_client, + ) + resource_repository = providers.Singleton( + SQLAlchemyRepository[Resource], + model=Resource, + db_client=db_client, + ) + resource_role_repository = providers.Singleton( + SQLAlchemyRepository[RoleResource], + model=RoleResource, + db_client=db_client, + ) + user_resource_repository = providers.Singleton( + SQLAlchemyRepository[UserRole], + model=UserRole, + db_client=db_client, + ) + + cache_manager = providers.Singleton( + CacheManager, namespace=config.env_config.app_name + ) + + knowledge_base_repository = providers.Singleton( + SQLAlchemyRepository[KnowledgeBase], + model=KnowledgeBase, + db_client=db_client, + ) + + knowledge_base_documents_repository = providers.Singleton( + SQLAlchemyRepository[KnowledgeBaseDocuments], + model=KnowledgeBaseDocuments, + db_client=db_client, + ) + + knowledge_base_embeddings_repository = providers.Singleton( + SQLAlchemyRepository[KnowledgeBaseEmbeddings], + model=KnowledgeBaseEmbeddings, + db_client=db_client, + ) + + kb_inference_repository = providers.Singleton( + SQLAlchemyRepository[KnowledgeBaseInferences], + model=KnowledgeBaseInferences, + db_client=db_client, + ) + + auth_secrets_repository = providers.Singleton( + SQLAlchemyRepository[AuthSecrets], + model=AuthSecrets, + db_client=db_client, + ) + + product_analytics_repository = providers.Singleton( + SQLAlchemyRepository[ProductAnalytics], + model=ProductAnalytics, + db_client=db_client, + ) + + session_repository = providers.Singleton( + SQLAlchemyRepository[Session], + model=Session, + db_client=db_client, + ) + + config_repository = providers.Singleton( + SQLAlchemyRepository[Config], + model=Config, + db_client=db_client, + ) + + dynamic_query_repository = providers.Singleton( + SQLAlchemyRepository[DynamicQueryYaml], + model=DynamicQueryYaml, + db_client=db_client, + ) + + model_inference_repository = providers.Singleton( + SQLAlchemyRepository[ModelSchema], + model=ModelSchema, + db_client=db_client, + ) + + ikb_repository = providers.Singleton( + SQLAlchemyRepository[ImageKnowledgeBase], + 
model=ImageKnowledgeBase, + db_client=db_client, + ) + + reference_features_repository = providers.Singleton( + SQLAlchemyRepository[ReferenceImageFeatures], + model=ReferenceImageFeatures, + db_client=db_client, + ) + + sift_features_repository = providers.Singleton( + SQLAlchemyRepository[SIFTFeatures], + model=SIFTFeatures, + db_client=db_client, + ) + + workflow_pipeline_repository = providers.Singleton( + SQLAlchemyRepository[WorkflowPipeline], + model=WorkflowPipeline, + db_client=db_client, + ) + + workflow_runs_repository = providers.Singleton( + SQLAlchemyRepository[WorkflowRuns], + model=WorkflowRuns, + db_client=db_client, + ) + + namespace_repository = providers.Singleton( + SQLAlchemyRepository[Namespace], + model=Namespace, + db_client=db_client, + ) + + agent_repository = providers.Singleton( + SQLAlchemyRepository[Agent], + model=Agent, + db_client=db_client, + ) + + workflow_repository = providers.Singleton( + SQLAlchemyRepository[Workflow], + model=Workflow, + db_client=db_client, + ) + + api_services_repository = providers.Singleton( + SQLAlchemyRepository[ApiServices], + model=ApiServices, + db_client=db_client, + ) + + datasource_repository = providers.Singleton( + SQLAlchemyRepository[Datasource], + model=Datasource, + db_client=db_client, + ) + knowledge_base_inference_repository = providers.Singleton( + SQLAlchemyRepository[KnowledgeBaseInferences], + model=KnowledgeBaseInferences, + db_client=db_client, + ) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/agent.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/agent.py new file mode 100644 index 00000000..08a8bc78 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/agent.py @@ -0,0 +1,39 @@ +import uuid +from datetime import datetime + +from sqlalchemy import ForeignKey, String, UniqueConstraint, func +from sqlalchemy.orm import Mapped, mapped_column + +from ..database.base import Base + + +class Agent(Base): + __tablename__ = 'agents' + __table_args__ = ( + UniqueConstraint('name', 'namespace', name='uq_agents_name_namespace'), + ) + + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, default=uuid.uuid4, index=True + ) + name: Mapped[str] = mapped_column(String(length=255), nullable=False) + namespace: Mapped[str] = mapped_column( + ForeignKey('namespaces.name'), nullable=False, index=True + ) + created_at: Mapped[datetime] = mapped_column(default=func.now()) + updated_at: Mapped[datetime] = mapped_column( + default=func.now(), onupdate=func.now() + ) + + @staticmethod + def get_table_name(): + return (Agent()).__tablename__ + + def to_dict(self): + return { + 'id': str(self.id), + 'name': self.name, + 'namespace': self.namespace, + 'created_at': self.created_at.isoformat(), + 'updated_at': self.updated_at.isoformat(), + } diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/api_services.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/api_services.py new file mode 100644 index 00000000..b8c98c85 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/api_services.py @@ -0,0 +1,33 @@ +from datetime import datetime + +from sqlalchemy import Boolean, String, func +from sqlalchemy.orm import Mapped, mapped_column + +from ..database.base import Base + + +class ApiServices(Base): + __tablename__ = 'api_services' + + id: Mapped[str] = mapped_column( + String(length=255), nullable=False, unique=True, primary_key=True + ) + service_def_path: Mapped[str] = 
mapped_column(String(length=255), nullable=False)
+    is_active: Mapped[bool] = mapped_column(Boolean(), nullable=False, default=True)
+    created_at: Mapped[datetime] = mapped_column(default=func.now())
+    updated_at: Mapped[datetime] = mapped_column(
+        default=func.now(), onupdate=func.now()
+    )
+
+    @staticmethod
+    def get_table_name():
+        return (ApiServices()).__tablename__
+
+    def to_dict(self):
+        return {
+            'id': self.id,
+            'service_def_path': self.service_def_path,
+            'is_active': self.is_active,
+            'created_at': self.created_at.isoformat(),
+            'updated_at': self.updated_at.isoformat(),
+        }
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/auth_secrets.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/auth_secrets.py
new file mode 100644
index 00000000..d1c72408
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/auth_secrets.py
@@ -0,0 +1,35 @@
+import uuid
+from datetime import datetime
+
+from sqlalchemy import func
+from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
+
+from ..database.base import Base
+
+
+class AuthSecrets(Base):
+    __tablename__ = 'auth_secrets'
+
+    id: Mapped[uuid.UUID] = mapped_column(
+        primary_key=True, default=uuid.uuid4, index=True
+    )
+    client_key: Mapped[str] = mapped_column(nullable=False, unique=True)
+    client_secret: Mapped[str] = mapped_column(nullable=False)
+    created_at: Mapped[datetime] = mapped_column(nullable=False, default=func.now())
+    updated_at: Mapped[datetime] = mapped_column(
+        nullable=False, default=func.now(), onupdate=func.now()
+    )
+
+    def to_dict(self):
+        return {
+            'id': str(self.id),
+            'client_key': self.client_key,
+            'client_secret': self.client_secret,
+            'created_at': self.created_at.isoformat(),
+            'updated_at': self.updated_at.isoformat(),
+        }
+
+    @staticmethod
+    def get_table_name():
+        return AuthSecrets().__tablename__
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/authenticator.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/authenticator.py
new file mode 100644
index 00000000..70d5d297
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/authenticator.py
@@ -0,0 +1,35 @@
+import uuid
+from datetime import datetime
+
+from sqlalchemy import Boolean, String, func, CheckConstraint
+from sqlalchemy.dialects.postgresql import UUID, JSONB
+from sqlalchemy.orm import Mapped, mapped_column
+from ..database.base import Base
+
+
+class Authenticator(Base):
+    __tablename__ = 'authenticator'
+
+    auth_id: Mapped[uuid.UUID] = mapped_column(
+        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4
+    )
+    auth_name: Mapped[str] = mapped_column(
+        String(length=64), nullable=False, unique=True
+    )
+    auth_type: Mapped[str] = mapped_column(String, nullable=False)
+    auth_desc: Mapped[str] = mapped_column(String, nullable=True)
+    config: Mapped[dict] = mapped_column(JSONB, nullable=False)
+    is_enabled: Mapped[bool] = mapped_column(Boolean, default=True)
+    is_deleted: Mapped[bool] = mapped_column(Boolean, default=False)
+    created_at: Mapped[datetime] = mapped_column(default=func.now())
+    updated_at: Mapped[datetime] = mapped_column(
+        default=func.now(), onupdate=func.now()
+    )
+
+    __table_args__ = (
+        CheckConstraint("auth_name !~ '\\s'", name='auth_name_no_spaces'),
+    )
+
+    @staticmethod
+    def get_table_name():
+        return (Authenticator()).__tablename__
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/config.py
b/wavefront/server/modules/db_repo_module/db_repo_module/models/config.py new file mode 100644 index 00000000..9a266f64 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/config.py @@ -0,0 +1,25 @@ +from ..database.base import Base +from sqlalchemy import Column, String, DateTime, func +from sqlalchemy.dialects.postgresql import JSONB + + +class Config(Base): + __tablename__ = 'config' + key = Column(String(255), primary_key=True) + value = Column(JSONB, nullable=False) + created_at = Column(DateTime, nullable=False, default=func.now()) + updated_at = Column( + DateTime, nullable=False, default=func.now(), onupdate=func.now() + ) + + def to_dict(self): + return { + 'key': self.key, + 'value': self.value, + 'created_at': self.created_at, + 'updated_at': self.updated_at, + } + + @staticmethod + def get_table_name(): + return Config.__tablename__ diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/datasource.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/datasource.py new file mode 100644 index 00000000..7c8e4362 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/datasource.py @@ -0,0 +1,47 @@ +import uuid +import json +from datetime import datetime + +from sqlalchemy import String, func +from sqlalchemy.dialects.postgresql import UUID, JSONB +from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy.orm import relationship + +from ..database.base import Base + + +class Datasource(Base): + __tablename__ = 'datasource' + + id: Mapped[uuid.UUID] = mapped_column( + UUID, primary_key=True, default=uuid.uuid4, index=True + ) + name: Mapped[str] = mapped_column(String(length=64), nullable=False) + description: Mapped[str] = mapped_column(String(length=255), nullable=True) + type: Mapped[str] = mapped_column(String(length=64), nullable=False) + config: Mapped[str] = mapped_column(JSONB, nullable=False) + created_at: Mapped[datetime] = mapped_column(default=func.now()) + updated_at: Mapped[datetime] = mapped_column( + default=func.now(), onupdate=func.now() + ) + dynamic_queries = relationship( + 'DynamicQueryYaml', back_populates='datasource', cascade='all' + ) + + @staticmethod + def get_table_name(): + return (Datasource()).__tablename__ + + def to_dict(self): + result = {} + for column in self.__table__.columns: + value = getattr(self, column.name) + if isinstance(value, uuid.UUID): + result[column.name] = str(value) + elif isinstance(value, datetime): + result[column.name] = value.isoformat() + elif column.name == 'meta': + result[column.name] = json.loads(value) if value else None + else: + result[column.name] = value + return result diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/documents.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/documents.py new file mode 100644 index 00000000..694c28a0 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/documents.py @@ -0,0 +1,14 @@ +from datetime import datetime + +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column + +from ..database.base import Base + + +class Document(Base): + __tablename__ = 'document' + + document_id: Mapped[str] = mapped_column(primary_key=True, index=True) + document_name: Mapped[str] + last_update_timestamp: Mapped[datetime] = mapped_column(default=datetime.now) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/dynamic_query_yaml.py 
b/wavefront/server/modules/db_repo_module/db_repo_module/models/dynamic_query_yaml.py new file mode 100644 index 00000000..c738c27b --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/dynamic_query_yaml.py @@ -0,0 +1,33 @@ +from ..database.base import Base +from sqlalchemy import Column, String, DateTime, func +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship + + +class DynamicQueryYaml(Base): + __tablename__ = 'dynamic_query_yaml' + name = Column(String(255), primary_key=True) + file_path = Column(String(255), nullable=False) + datasource_id = Column( + UUID, ForeignKey('datasource.id', ondelete='CASCADE'), nullable=False + ) + created_at = Column(DateTime, nullable=False, default=func.now()) + updated_at = Column( + DateTime, nullable=False, default=func.now(), onupdate=func.now() + ) + + datasource = relationship('Datasource', back_populates='dynamic_queries') + + def to_dict(self): + return { + 'name': self.name, + 'file_path': self.file_path, + 'datasource_id': str(self.datasource_id), + 'created_at': self.created_at, + 'updated_at': self.updated_at, + } + + @staticmethod + def get_table_name(): + return DynamicQueryYaml.__tablename__ diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/email.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/email.py new file mode 100644 index 00000000..fedc9f26 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/email.py @@ -0,0 +1,16 @@ +from datetime import datetime + +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column + +from ..database.base import Base + + +class Email(Base): + __tablename__ = 'email' + + id: Mapped[str] = mapped_column(primary_key=True, index=True) + thread_id: Mapped[str] + account_id: Mapped[str] + content: Mapped[str] + synced_at: Mapped[datetime] = mapped_column(default=datetime.now) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/ikb_models.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/ikb_models.py new file mode 100644 index 00000000..7d4bfc4c --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/ikb_models.py @@ -0,0 +1,40 @@ +from datetime import datetime +from sqlalchemy import ( + String, + Integer, + DateTime, + Text, + JSON, +) +from sqlalchemy.orm import Mapped, mapped_column, relationship +from db_repo_module.database.base import Base + + +class ImageKnowledgeBase(Base): + """Image Knowledge Base - holds metadata about each IKB""" + + __tablename__ = 'image_knowledge_bases' + + ikb_id: Mapped[str] = mapped_column(String(255), primary_key=True) + name: Mapped[str] = mapped_column(String(100), nullable=False) + description: Mapped[str] = mapped_column(Text, nullable=True) + ikb_type: Mapped[str] = mapped_column( + String(50), nullable=False + ) # gold_matching, photo_matching, etc. + algorithm_type: Mapped[str] = mapped_column( + String(50), nullable=False + ) # sift, sam_dinov2, etc. 
+ status: Mapped[str] = mapped_column( + String(20), default='active' + ) # active, inactive + config: Mapped[dict] = mapped_column( + JSON, default=dict + ) # Algorithm-specific configuration + image_count: Mapped[int] = mapped_column(Integer, default=0) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow) + updated_at: Mapped[datetime] = mapped_column( + DateTime, default=datetime.utcnow, onupdate=datetime.utcnow + ) + + # Relationship to reference images + reference_images = relationship('ReferenceImageFeatures', back_populates='ikb') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/image_search_models.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/image_search_models.py new file mode 100644 index 00000000..93c0f95e --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/image_search_models.py @@ -0,0 +1,64 @@ +from datetime import datetime +import uuid +from sqlalchemy import ( + String, + Float, + Integer, + DateTime, + JSON, + ForeignKey, +) +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.orm import Mapped, mapped_column, relationship +from db_repo_module.database.base import Base + + +# Base table for all reference images +class ReferenceImageFeatures(Base): + __tablename__ = 'reference_image_features' + + reference_image_id: Mapped[str] = mapped_column(String(255), primary_key=True) + ikb_id: Mapped[str] = mapped_column( + String(255), ForeignKey('image_knowledge_bases.ikb_id'), nullable=True + ) + algorithm_type: Mapped[str] = mapped_column(String(50), nullable=False) + image_url: Mapped[str] = mapped_column(String(500), nullable=False) + image_metadata: Mapped[dict] = mapped_column(JSON, default=dict) + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow) + + # Relationship to IKB + ikb = relationship('ImageKnowledgeBase', back_populates='reference_images') + + +# SIFT-specific table (for 3000-5000 keypoints per image) +class SIFTFeatures(Base): + __tablename__ = 'sift_features' + + id: Mapped[uuid.UUID] = mapped_column( + UUID(as_uuid=True), primary_key=True, default=uuid.uuid4 + ) + # reference_image_id: Mapped[str] = mapped_column(String(255), nullable=False) + reference_image_id: Mapped[str] = mapped_column( + String(255), + ForeignKey('reference_image_features.reference_image_id', ondelete='CASCADE'), + nullable=False, + ) + keypoint_id: Mapped[int] = mapped_column( + Integer, nullable=False + ) # 0, 1, 2, ..., 4999 + + # Keypoint properties + x: Mapped[float] = mapped_column(Float, nullable=False) + y: Mapped[float] = mapped_column(Float, nullable=False) + size: Mapped[float] = mapped_column(Float, nullable=False) + angle: Mapped[float] = mapped_column(Float, nullable=False) + response: Mapped[float] = mapped_column(Float, nullable=False) + octave: Mapped[int] = mapped_column(Integer, nullable=False) + class_id: Mapped[int] = mapped_column(Integer, nullable=False) + + # Descriptor (128 values as JSON array) + descriptor: Mapped[list] = mapped_column( + JSON, nullable=False + ) # [0.12, 0.34, ..., 0.78] + + created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/kb_inferences.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/kb_inferences.py new file mode 100644 index 00000000..83aa48ef --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/kb_inferences.py @@ -0,0 +1,42 @@ +from datetime 
import datetime +import uuid +import json + +from sqlalchemy import ForeignKey +from sqlalchemy import JSON +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column + +from ..database.base import Base + + +class KnowledgeBaseInferences(Base): + __tablename__ = 'knowledge_base_inferences' + + inference_id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, default=uuid.uuid4, index=True + ) + knowledge_base_id: Mapped[uuid.UUID] = mapped_column( + ForeignKey('knowledge_bases.id', ondelete='CASCADE'), + nullable=False, + ) + inference_content: Mapped[dict] = mapped_column(JSON, nullable=False) + config_id: Mapped[uuid.UUID] = mapped_column( + ForeignKey('llm_inference_config.id', ondelete='CASCADE'), nullable=True + ) + created_at: Mapped[datetime] = mapped_column(default=datetime.now) + updated_at: Mapped[datetime] = mapped_column(default=datetime.now) + + def to_dict(self): + result = {} + for column in self.__table__.columns: + value = getattr(self, column.name) + if isinstance(value, uuid.UUID): + result[column.name] = str(value) + elif isinstance(value, datetime): + result[column.name] = value.isoformat() + elif column.name == 'meta': + result[column.name] = json.loads(value) if value else None + else: + result[column.name] = value + return result diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/knowledge_base_documents.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/knowledge_base_documents.py new file mode 100644 index 00000000..22046cff --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/knowledge_base_documents.py @@ -0,0 +1,44 @@ +from datetime import datetime +import json +import uuid + +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column +from sqlalchemy import ForeignKey, JSON + +from ..database.base import Base + + +class KnowledgeBaseDocuments(Base): + __tablename__ = 'knowledge_base_documents' + + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, default=uuid.uuid4, index=True + ) + knowledge_base_id: Mapped[uuid.UUID] = mapped_column( + ForeignKey('knowledge_bases.id', ondelete='CASCADE'), + nullable=False, + ) + file_path: Mapped[str] = mapped_column(nullable=False) + file_name: Mapped[str] = mapped_column(nullable=False) + file_type: Mapped[str] = mapped_column(nullable=False) + file_size: Mapped[int] = mapped_column(nullable=False) + created_at: Mapped[datetime] = mapped_column(default=datetime.now) + updated_at: Mapped[datetime] = mapped_column(default=datetime.now) + metadata_value: Mapped[dict] = mapped_column( + JSON, nullable=True, default=lambda: {} + ) + + def to_dict(self): + result = {} + for column in self.__table__.columns: + value = getattr(self, column.name) + if isinstance(value, uuid.UUID): + result[column.name] = str(value) + elif isinstance(value, datetime): + result[column.name] = value.isoformat() + elif column.name == 'meta': + result[column.name] = json.loads(value) if value else None + else: + result[column.name] = value + return result diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/knowledge_base_embeddings.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/knowledge_base_embeddings.py new file mode 100644 index 00000000..96f45330 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/knowledge_base_embeddings.py @@ -0,0 +1,39 @@ +from datetime import datetime +import os +import uuid + +from pgvector.sqlalchemy import Vector +from sqlalchemy import 
Column +from sqlalchemy import ForeignKey +from sqlalchemy import Text +from sqlalchemy.dialects.postgresql import TSVECTOR +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column + +from ..database.base import Base + + +class KnowledgeBaseEmbeddings(Base): + __tablename__ = 'knowledge_base_embeddings' + + id: Mapped[uuid.UUID] = mapped_column( + UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True + ) + document_id: Mapped[uuid.UUID] = mapped_column( + ForeignKey('knowledge_base_documents.id', ondelete='CASCADE'), + nullable=False, + ) + # Using pgvector's Vector type for proper vector storage + embedding_vector = ( + Column(Vector) if os.environ.get('APP_ENV') != 'test' else Column(Text) + ) + embedding_vector_1 = ( + Column(Vector, nullable=True) + if os.environ.get('APP_ENV') != 'test' + else Column(Text) + ) + chunk_text: Mapped[str] = mapped_column(Text, nullable=False) + chunk_index: Mapped[int] = mapped_column(nullable=False) + created_at: Mapped[datetime] = mapped_column(default=datetime.now) + token = Column(TSVECTOR) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/knowledge_bases.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/knowledge_bases.py new file mode 100644 index 00000000..fceea41c --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/knowledge_bases.py @@ -0,0 +1,37 @@ +from datetime import datetime +import json +import uuid + +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column + +from ..database.base import Base + + +class KnowledgeBase(Base): + __tablename__ = 'knowledge_bases' + + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, default=uuid.uuid4, index=True + ) + name: Mapped[str] = mapped_column(nullable=False, unique=True) + description: Mapped[str] = mapped_column(nullable=True) + type: Mapped[str] = mapped_column(nullable=False) + vector_size: Mapped[int] = mapped_column(nullable=True) + vector_size_1: Mapped[int] = mapped_column(nullable=True) + created_at: Mapped[datetime] = mapped_column(default=datetime.now) + updated_at: Mapped[datetime] = mapped_column(default=datetime.now) + + def to_dict(self): + result = {} + for column in self.__table__.columns: + value = getattr(self, column.name) + if isinstance(value, uuid.UUID): + result[column.name] = str(value) + elif isinstance(value, datetime): + result[column.name] = value.isoformat() + elif column.name == 'meta': + result[column.name] = json.loads(value) if value else None + else: + result[column.name] = value + return result diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/llm_inference_config.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/llm_inference_config.py new file mode 100644 index 00000000..0fd17223 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/llm_inference_config.py @@ -0,0 +1,47 @@ +import uuid +from datetime import datetime +from typing import Optional + +from sqlalchemy import String, JSON, func +from sqlalchemy.orm import Mapped, mapped_column + +from ..database.base import Base + + +class LlmInferenceConfig(Base): + __tablename__ = 'llm_inference_config' + + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, default=uuid.uuid4, index=True + ) + llm_model: Mapped[str] = mapped_column(String(length=255), nullable=False) + display_name: Mapped[str] = mapped_column(String(length=255), nullable=False) + api_key: 
Mapped[Optional[str]] = mapped_column(String(length=512), nullable=True) + type: Mapped[str] = mapped_column(String(length=64), nullable=False) + base_url: Mapped[Optional[str]] = mapped_column(String(length=512), nullable=True) + parameters: Mapped[Optional[dict]] = mapped_column(JSON, nullable=True) + is_deleted: Mapped[bool] = mapped_column(default=False, nullable=False) + created_at: Mapped[datetime] = mapped_column(default=func.now()) + updated_at: Mapped[datetime] = mapped_column( + default=func.now(), onupdate=func.now() + ) + + @staticmethod + def get_table_name(): + return (LlmInferenceConfig()).__tablename__ + + def to_dict(self, exclude_api_key: bool = True): + result = {} + for column in self.__table__.columns: + # Skip api_key in responses for security + if exclude_api_key and column.name == 'api_key': + continue + + value = getattr(self, column.name) + if isinstance(value, uuid.UUID): + result[column.name] = str(value) + elif isinstance(value, datetime): + result[column.name] = value.isoformat() + else: + result[column.name] = value + return result diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/message_processors.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/message_processors.py new file mode 100644 index 00000000..26624f5d --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/message_processors.py @@ -0,0 +1,46 @@ +from datetime import datetime +import uuid +from typing import Optional + +from sqlalchemy import func +from sqlalchemy import String, Text +from sqlalchemy import UUID + +from sqlalchemy.orm import Mapped, mapped_column +from ..database.base import Base + + +class MessageProcessors(Base): + """ + Model for storing function definitions that can be executed in isolated VMs (Node.js/Deno). + YAML files are stored in cloud storage buckets, and the file URL is stored in the source field. 
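+
+    Illustrative (hypothetical) example of a stored row:
+        name:        'mask-pii'
+        description: 'Strips PII fields from inbound messages'
+        source:      'gs://<bucket>/processors/mask-pii.yaml'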
+ """ + + __tablename__ = 'message_processors' + + id: Mapped[uuid.UUID] = mapped_column( + UUID, primary_key=True, default=uuid.uuid4, index=True + ) + name: Mapped[str] = mapped_column(String(length=64), unique=True, nullable=False) + description: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + source: Mapped[str] = mapped_column( + String(length=512), unique=True, nullable=False + ) # YAML file URL/path in bucket + created_at: Mapped[datetime] = mapped_column(default=func.now()) + updated_at: Mapped[datetime] = mapped_column( + default=func.now(), onupdate=func.now() + ) + + def to_dict(self): + return { + 'id': str(self.id), + 'name': self.name, + 'description': self.description, + 'source': self.source, + 'created_at': self.created_at.isoformat(), + 'updated_at': self.updated_at.isoformat(), + } + + @staticmethod + def get_table_name(): + return MessageProcessors.__tablename__ diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/model_schema.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/model_schema.py new file mode 100644 index 00000000..0d5d0529 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/model_schema.py @@ -0,0 +1,34 @@ +from datetime import datetime +import uuid +import json + +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column + +from ..database.base import Base + + +class ModelSchema(Base): + __tablename__ = 'model_inference' + + model_id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, default=uuid.uuid4, index=True + ) + model_name: Mapped[str] = mapped_column(nullable=True, unique=True) + model_path: Mapped[str] = mapped_column(nullable=True) + model_type: Mapped[str] = mapped_column(nullable=True) + created_at: Mapped[datetime] = mapped_column(default=datetime.now) + + def to_dict(self): + result = {} + for column in self.__table__.columns: + value = getattr(self, column.name) + if isinstance(value, uuid.UUID): + result[column.name] = str(value) + elif isinstance(value, datetime): + result[column.name] = value.isoformat() + elif column.name == 'meta': + result[column.name] = json.loads(value) if value else None + else: + result[column.name] = value + return result diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/namespace.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/namespace.py new file mode 100644 index 00000000..264bf042 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/namespace.py @@ -0,0 +1,27 @@ +from datetime import datetime + +from sqlalchemy import String, func +from sqlalchemy.orm import Mapped, mapped_column + +from ..database.base import Base + + +class Namespace(Base): + __tablename__ = 'namespaces' + + name: Mapped[str] = mapped_column(String(length=255), primary_key=True, index=True) + created_at: Mapped[datetime] = mapped_column(default=func.now()) + updated_at: Mapped[datetime] = mapped_column( + default=func.now(), onupdate=func.now() + ) + + @staticmethod + def get_table_name(): + return (Namespace()).__tablename__ + + def to_dict(self): + return { + 'name': self.name, + 'created_at': self.created_at.isoformat(), + 'updated_at': self.updated_at.isoformat(), + } diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/notification_users.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/notification_users.py new file mode 100644 index 00000000..168d449b --- /dev/null +++ 
b/wavefront/server/modules/db_repo_module/db_repo_module/models/notification_users.py @@ -0,0 +1,24 @@ +import uuid + +from sqlalchemy import Boolean +from sqlalchemy import Column +from sqlalchemy import ForeignKey +from sqlalchemy import UUID +from sqlalchemy.orm import relationship + +from ..database.base import Base + + +class NotificationUser(Base): + __tablename__ = 'notification_user' + + id = Column(UUID, primary_key=True, default=uuid.uuid4) + user_id = Column(UUID, ForeignKey('user.id'), nullable=False) + notification_id = Column(UUID, ForeignKey('notification.id'), nullable=False) + seen = Column(Boolean, nullable=False, default=False) + + notification = relationship('Notification', back_populates='notification_user') + + @staticmethod + def get_table_name(): + return (NotificationUser()).__tablename__ diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/notifications.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/notifications.py new file mode 100644 index 00000000..8c73b1d5 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/notifications.py @@ -0,0 +1,28 @@ +import uuid + +from sqlalchemy import Column +from sqlalchemy import DateTime +from sqlalchemy import String +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.orm import relationship +from sqlalchemy.sql import func + +from ..database.base import Base + + +class Notification(Base): + __tablename__ = 'notification' + + id = Column(UUID, primary_key=True, default=uuid.uuid4) + title: String = Column(String, nullable=False) + type: String = Column(String, nullable=False) + created_at = Column(DateTime, nullable=False, server_default=func.now()) + updated_at = Column( + DateTime, nullable=False, server_default=func.now(), onupdate=func.now() + ) + + notification_user = relationship('NotificationUser', back_populates='notification') + + @staticmethod + def get_table_name(): + return (Notification()).__tablename__ diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/oauth_credential.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/oauth_credential.py new file mode 100644 index 00000000..9d778026 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/oauth_credential.py @@ -0,0 +1,22 @@ +from typing import List + +from sqlalchemy import JSON +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column + +from ..database.base import Base + + +class OAuthCredential(Base): + __tablename__ = 'oauth_credential' + + id: Mapped[str] = mapped_column(index=True, primary_key=True) + email: Mapped[str] + provider: Mapped[str] # eg: google/azure + access_token: Mapped[str] + refresh_token: Mapped[str] + token_uri: Mapped[str] = mapped_column(nullable=True) + client_id: Mapped[str] = mapped_column(nullable=True) + client_secret: Mapped[str] = mapped_column(nullable=True) + scopes: Mapped[List[str]] = mapped_column(JSON) + expiry: Mapped[int] = mapped_column(nullable=True) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/product_analytics.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/product_analytics.py new file mode 100644 index 00000000..41ae6c96 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/product_analytics.py @@ -0,0 +1,42 @@ +from sqlalchemy import Column, DateTime, func +from sqlalchemy import String +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.dialects.postgresql 
import UUID
+import uuid
+from datetime import datetime
+from ..database.base import Base
+
+
+class ProductAnalytics(Base):
+    __tablename__ = 'product_analytics'
+    event_id = Column(UUID, primary_key=True, default=uuid.uuid4)
+    event_name = Column(String, nullable=False)
+    type = Column(String, nullable=True)
+    sub_type = Column(String, nullable=True)
+    category = Column(String, nullable=True)
+    sub_category = Column(String, nullable=True)
+    action = Column(String, nullable=True)
+    action_type = Column(String, nullable=True)
+    page = Column(String, nullable=False)
+    page_path = Column(String, nullable=False)
+    # NOTE: 'metadata' is a reserved attribute on SQLAlchemy declarative models;
+    # this (misspelled) column name is kept as-is so the mapped attribute, the
+    # DB column, and to_dict() stay consistent.
+    matadata = Column(JSONB, nullable=True)
+    user_id = Column(UUID, nullable=False)
+    session_id = Column(UUID, nullable=False)
+    user_role = Column(String, nullable=False)
+    created_at = Column(DateTime, nullable=False, server_default=func.now())
+
+    def to_dict(self):
+        result = {}
+        for column in self.__table__.columns:
+            value = getattr(self, column.name)
+            if isinstance(value, uuid.UUID):
+                result[column.name] = str(value)
+            elif isinstance(value, datetime):
+                result[column.name] = value.isoformat()
+            else:
+                result[column.name] = value
+        return result
+
+    @staticmethod
+    def get_table_name():
+        return (ProductAnalytics()).__tablename__
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/resource.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/resource.py
new file mode 100644
index 00000000..c2891fd9
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/resource.py
@@ -0,0 +1,54 @@
+from datetime import datetime
+from enum import Enum
+import json
+import uuid
+
+from sqlalchemy import UniqueConstraint
+from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
+from sqlalchemy.orm import relationship
+
+from ..database.base import Base
+from .role_resource import RoleResource
+
+
+class ResourceScope(str, Enum):
+    DATA = 'data'
+    DASHBOARD = 'dashboard'
+    CONSOLE = 'console'
+
+
+class Resource(Base):
+    __tablename__ = 'resource'
+
+    id: Mapped[str] = mapped_column(primary_key=True, index=True, default=uuid.uuid4)
+    key: Mapped[str] = mapped_column(nullable=False)
+    value: Mapped[str] = mapped_column(nullable=False)
+    description: Mapped[str] = mapped_column(nullable=True)
+    scope: Mapped[str] = mapped_column(nullable=False)
+    meta: Mapped[str] = mapped_column(nullable=True)
+
+    # Update relationship with explicit secondary model
+    roles = relationship(
+        'Role', secondary=RoleResource.__table__, back_populates='resources'
+    )
+
+    __table_args__ = (UniqueConstraint('key', 'value', name='key_value'),)
+
+    def to_dict(self):
+        result = {}
+        for column in self.__table__.columns:
+            value = getattr(self, column.name)
+            if isinstance(value, uuid.UUID):
+                result[column.name] = str(value)
+            elif isinstance(value, datetime):
+                result[column.name] = value.isoformat()
+            elif column.name == 'meta':
+                result[column.name] = json.loads(value) if value else None
+            else:
+                result[column.name] = value
+        return result
+
+    @staticmethod
+    def get_table_name():
+        return (Resource()).__tablename__
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/role.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/role.py
new file mode 100644
index 00000000..76281669
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/role.py
@@ -0,0 +1,26 @@
+import uuid
+
+from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
+from sqlalchemy.orm
import relationship + +from ..database.base import Base +from .role_resource import RoleResource +from .user_role import UserRole + + +class Role(Base): + __tablename__ = 'role' + + id: Mapped[str] = mapped_column(index=True, primary_key=True, default=uuid.uuid4) + name: Mapped[str] = mapped_column(nullable=False) + description: Mapped[str] = mapped_column(nullable=True) + + # Update relationships with explicit secondary models + users = relationship('User', secondary=UserRole.__table__, back_populates='roles') + resources = relationship( + 'Resource', secondary=RoleResource.__table__, back_populates='roles' + ) + + def to_dict(self): + return {'id': self.id, 'name': self.name, 'description': self.description} diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/role_resource.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/role_resource.py new file mode 100644 index 00000000..7862d4c4 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/role_resource.py @@ -0,0 +1,16 @@ +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column + +from ..database.base import Base + + +class RoleResource(Base): + __tablename__ = 'role_resource' + + role_id: Mapped[str] = mapped_column( + ForeignKey('role.id', ondelete='CASCADE'), primary_key=True + ) + resource_id: Mapped[str] = mapped_column( + ForeignKey('resource.id', ondelete='CASCADE'), primary_key=True + ) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/saml_config.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/saml_config.py new file mode 100644 index 00000000..bc6d24ae --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/saml_config.py @@ -0,0 +1,37 @@ +from datetime import datetime +import uuid + +from sqlalchemy import Boolean +from sqlalchemy import Column +from sqlalchemy import DateTime +from sqlalchemy import String +from sqlalchemy.dialects.postgresql import UUID + +from ..database.base import Base + + +class BaseModel(Base): + __abstract__ = True + created_at = Column(DateTime, default=datetime.now) + updated_at = Column(DateTime, default=datetime.now) + + def save(self, session): + self.updated_at = datetime.now() + session.add(self) + session.commit() + + +class SAMLConfig(BaseModel): + __tablename__ = 'saml_config' + + id = Column(UUID, primary_key=True, default=uuid.uuid4) + entity_id = Column(String, nullable=False) + sso_url = Column(String, nullable=False) + slo_url = Column(String) + x509_certificate = Column(String, nullable=False) + name_id_format = Column(String) + metadata_xml = Column(String) + created_at = Column(DateTime, default=datetime.now) + updated_at = Column(DateTime, default=datetime.now) + is_active = Column(Boolean, default=True, index=True) + created_by = Column(UUID) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/session.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/session.py new file mode 100644 index 00000000..8a065304 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/session.py @@ -0,0 +1,29 @@ +from datetime import datetime +from datetime import timezone +import uuid + +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column +from sqlalchemy.orm import relationship + +from ..database.base import Base + + +class Session(Base): + __tablename__ = 'user_session' + + id: 
Mapped[uuid.UUID] = mapped_column(
+        primary_key=True, default=uuid.uuid4, index=True
+    )
+    user_id: Mapped[uuid.UUID] = mapped_column(
+        ForeignKey('user.id', ondelete='CASCADE'), nullable=False, index=True
+    )
+    device_info: Mapped[str] = mapped_column(nullable=True)
+    created_at: Mapped[datetime] = mapped_column(default=datetime.utcnow)
+    # Defaults must be callables; calling datetime.now(...) directly here would
+    # freeze a single import-time timestamp for every row.
+    updated_at: Mapped[datetime] = mapped_column(
+        default=lambda: datetime.now(timezone.utc),
+        onupdate=lambda: datetime.now(timezone.utc),
+    )
+
+    # Relationship
+    user = relationship('User', back_populates='sessions')
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/stt_config.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/stt_config.py
new file mode 100644
index 00000000..96569899
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/stt_config.py
@@ -0,0 +1,56 @@
+import json
+import uuid
+from datetime import datetime
+from typing import Optional
+
+from sqlalchemy import String, Text, func
+from sqlalchemy.orm import Mapped, mapped_column
+
+from ..database.base import Base
+
+
+class SttConfig(Base):
+    __tablename__ = 'stt_configs'
+
+    id: Mapped[uuid.UUID] = mapped_column(
+        primary_key=True, default=uuid.uuid4, index=True
+    )
+    display_name: Mapped[str] = mapped_column(String(length=100), nullable=False)
+    description: Mapped[Optional[str]] = mapped_column(
+        String(length=500), nullable=True
+    )
+    provider: Mapped[str] = mapped_column(String(length=64), nullable=False)
+    api_key: Mapped[str] = mapped_column(String(length=512), nullable=False)
+    language: Mapped[Optional[str]] = mapped_column(String(length=64), nullable=True)
+    parameters: Mapped[Optional[str]] = mapped_column(Text, nullable=True)
+    is_deleted: Mapped[bool] = mapped_column(default=False, nullable=False)
+    created_at: Mapped[datetime] = mapped_column(default=func.now())
+    updated_at: Mapped[datetime] = mapped_column(
+        default=func.now(), onupdate=func.now()
+    )
+
+    @staticmethod
+    def get_table_name():
+        return (SttConfig()).__tablename__
+
+    def to_dict(self, exclude_api_key: bool = True):
+        result = {}
+        for column in self.__table__.columns:
+            # Skip api_key in responses for security
+            if exclude_api_key and column.name == 'api_key':
+                continue
+
+            value = getattr(self, column.name)
+            if isinstance(value, uuid.UUID):
+                result[column.name] = str(value)
+            elif isinstance(value, datetime):
+                result[column.name] = value.isoformat()
+            elif column.name == 'parameters' and value:
+                # Parse JSON field
+                try:
+                    result[column.name] = json.loads(value)
+                except (json.JSONDecodeError, TypeError):
+                    result[column.name] = value
+            else:
+                result[column.name] = value
+        return result
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/task.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/task.py
new file mode 100644
index 00000000..839de112
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/task.py
@@ -0,0 +1,19 @@
+from datetime import datetime
+
+from sqlalchemy.orm import Mapped
+from sqlalchemy.orm import mapped_column
+
+from ..database.base import Base
+
+
+class Task(Base):
+    __tablename__ = 'task'
+
+    message_id: Mapped[str] = mapped_column(primary_key=True)
+    thread_id: Mapped[str] = mapped_column(nullable=False)
+    account_id: Mapped[str] = mapped_column(nullable=False)
+    sender: Mapped[str] = mapped_column(nullable=False)
+    title: Mapped[str] = mapped_column(nullable=False)
+    description: Mapped[str] = mapped_column(nullable=False)
+    priority: Mapped[str] =
mapped_column(nullable=False) + created_at: Mapped[datetime] = mapped_column(default=datetime.now) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/team.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/team.py new file mode 100644 index 00000000..9dc6f9d2 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/team.py @@ -0,0 +1,16 @@ +import uuid + +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column + +from ..database.base import Base + + +class Team(Base): + __tablename__ = 'team' + + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, default=uuid.uuid4, index=True + ) + name: Mapped[str] = mapped_column(nullable=False) + description: Mapped[str] diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/telephony_config.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/telephony_config.py new file mode 100644 index 00000000..ace8edf2 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/telephony_config.py @@ -0,0 +1,62 @@ +import json +import uuid +from datetime import datetime +from typing import Optional + +from sqlalchemy import String, Text, func +from sqlalchemy.orm import Mapped, mapped_column + +from ..database.base import Base + + +class TelephonyConfig(Base): + __tablename__ = 'telephony_configs' + + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, default=uuid.uuid4, index=True + ) + display_name: Mapped[str] = mapped_column(String(length=100), nullable=False) + description: Mapped[Optional[str]] = mapped_column( + String(length=500), nullable=True + ) + provider: Mapped[str] = mapped_column(String(length=64), nullable=False) + connection_type: Mapped[str] = mapped_column(String(length=64), nullable=False) + credentials: Mapped[str] = mapped_column(Text, nullable=False) + phone_numbers: Mapped[str] = mapped_column(Text, nullable=False) + webhook_config: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + sip_config: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + is_deleted: Mapped[bool] = mapped_column(default=False, nullable=False) + created_at: Mapped[datetime] = mapped_column(default=func.now()) + updated_at: Mapped[datetime] = mapped_column( + default=func.now(), onupdate=func.now() + ) + + @staticmethod + def get_table_name(): + return (TelephonyConfig()).__tablename__ + + def to_dict(self, exclude_credentials: bool = True): + result = {} + for column in self.__table__.columns: + # Skip credentials in responses for security + if exclude_credentials and column.name == 'credentials': + continue + + value = getattr(self, column.name) + if isinstance(value, uuid.UUID): + result[column.name] = str(value) + elif isinstance(value, datetime): + result[column.name] = value.isoformat() + elif ( + column.name + in ['credentials', 'phone_numbers', 'webhook_config', 'sip_config'] + and value + ): + # Parse JSON fields + try: + result[column.name] = json.loads(value) + except (json.JSONDecodeError, TypeError): + result[column.name] = value + else: + result[column.name] = value + return result diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/tts_config.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/tts_config.py new file mode 100644 index 00000000..ff5f4f88 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/tts_config.py @@ -0,0 +1,57 @@ +import json +import uuid +from datetime import datetime +from typing import 
Optional + +from sqlalchemy import String, Text, func +from sqlalchemy.orm import Mapped, mapped_column + +from ..database.base import Base + + +class TtsConfig(Base): + __tablename__ = 'tts_configs' + + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, default=uuid.uuid4, index=True + ) + display_name: Mapped[str] = mapped_column(String(length=100), nullable=False) + description: Mapped[Optional[str]] = mapped_column( + String(length=500), nullable=True + ) + provider: Mapped[str] = mapped_column(String(length=64), nullable=False) + voice_id: Mapped[str] = mapped_column(String(length=255), nullable=False) + api_key: Mapped[str] = mapped_column(String(length=512), nullable=False) + language: Mapped[Optional[str]] = mapped_column(String(length=64), nullable=True) + parameters: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + is_deleted: Mapped[bool] = mapped_column(default=False, nullable=False) + created_at: Mapped[datetime] = mapped_column(default=func.now()) + updated_at: Mapped[datetime] = mapped_column( + default=func.now(), onupdate=func.now() + ) + + @staticmethod + def get_table_name(): + return (TtsConfig()).__tablename__ + + def to_dict(self, exclude_api_key: bool = True): + result = {} + for column in self.__table__.columns: + # Skip api_key in responses for security + if exclude_api_key and column.name == 'api_key': + continue + + value = getattr(self, column.name) + if isinstance(value, uuid.UUID): + result[column.name] = str(value) + elif isinstance(value, datetime): + result[column.name] = value.isoformat() + elif column.name == 'parameters' and value: + # Parse JSON field + try: + result[column.name] = json.loads(value) + except (json.JSONDecodeError, TypeError): + result[column.name] = value + else: + result[column.name] = value + return result diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/user.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/user.py new file mode 100644 index 00000000..ac13925c --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/user.py @@ -0,0 +1,51 @@ +import uuid +from datetime import datetime +from typing import Optional + +from db_repo_module.models.user_role import UserRole +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column +from sqlalchemy.orm import relationship + +from ..database.base import Base +from ..models.session import Session + + +class User(Base): + __tablename__ = 'user' + + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, default=uuid.uuid4, index=True + ) + email: Mapped[str] = mapped_column(nullable=False, unique=True) + password: Mapped[str] = mapped_column(nullable=False) + first_name: Mapped[str] = mapped_column(nullable=False) + last_name: Mapped[str] = mapped_column(nullable=False) + deleted: Mapped[bool] = mapped_column(default=False) + + # Account lockout fields + failed_attempts: Mapped[int] = mapped_column(default=0, nullable=False) + locked_until: Mapped[Optional[datetime]] = mapped_column(nullable=True) + last_failed_attempt: Mapped[Optional[datetime]] = mapped_column(nullable=True) + last_login_at: Mapped[Optional[datetime]] = mapped_column(nullable=True) + + # Add relationship + roles = relationship( + 'Role', + secondary=UserRole.__table__, + back_populates='users', + cascade='all, delete', + ) + + # Add relationship for sessions + sessions = relationship( + Session, back_populates='user', cascade='all, delete-orphan' + ) + + def to_dict(self): + return { + 'id': str(self.id), + 'email': 
self.email, + 'first_name': self.first_name, + 'last_name': self.last_name, + } diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/user_role.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/user_role.py new file mode 100644 index 00000000..cd61a38c --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/user_role.py @@ -0,0 +1,16 @@ +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column + +from ..database.base import Base + + +class UserRole(Base): + __tablename__ = 'user_role' + + user_id: Mapped[str] = mapped_column( + ForeignKey('user.id', ondelete='CASCADE'), primary_key=True + ) + role_id: Mapped[str] = mapped_column( + ForeignKey('role.id', ondelete='CASCADE'), primary_key=True + ) diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/voice_agent.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/voice_agent.py new file mode 100644 index 00000000..ebfda80b --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/voice_agent.py @@ -0,0 +1,62 @@ +import json +import uuid +from datetime import datetime +from typing import Optional + +from sqlalchemy import ForeignKey, String, Text, func +from sqlalchemy.orm import Mapped, mapped_column + +from ..database.base import Base + + +class VoiceAgent(Base): + __tablename__ = 'voice_agents' + + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, default=uuid.uuid4, index=True + ) + name: Mapped[str] = mapped_column(String(length=255), nullable=False) + description: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + llm_config_id: Mapped[uuid.UUID] = mapped_column( + ForeignKey('llm_inference_config.id'), nullable=False + ) + tts_config_id: Mapped[uuid.UUID] = mapped_column( + ForeignKey('tts_configs.id'), nullable=False + ) + stt_config_id: Mapped[uuid.UUID] = mapped_column( + ForeignKey('stt_configs.id'), nullable=False + ) + telephony_config_id: Mapped[uuid.UUID] = mapped_column( + ForeignKey('telephony_configs.id'), nullable=False + ) + system_prompt: Mapped[str] = mapped_column(Text, nullable=False) + conversation_config: Mapped[Optional[str]] = mapped_column(Text, nullable=True) + welcome_message: Mapped[str] = mapped_column(Text, nullable=False) + status: Mapped[str] = mapped_column(String(length=64), nullable=False) + is_deleted: Mapped[bool] = mapped_column(default=False, nullable=False) + created_at: Mapped[datetime] = mapped_column(default=func.now()) + updated_at: Mapped[datetime] = mapped_column( + default=func.now(), onupdate=func.now() + ) + + @staticmethod + def get_table_name(): + return (VoiceAgent()).__tablename__ + + def to_dict(self): + result = {} + for column in self.__table__.columns: + value = getattr(self, column.name) + if isinstance(value, uuid.UUID): + result[column.name] = str(value) + elif isinstance(value, datetime): + result[column.name] = value.isoformat() + elif column.name == 'conversation_config' and value: + # Parse JSON field + try: + result[column.name] = json.loads(value) + except (json.JSONDecodeError, TypeError): + result[column.name] = value + else: + result[column.name] = value + return result diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/workflow.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/workflow.py new file mode 100644 index 00000000..1540fa56 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/workflow.py @@ -0,0 
+1,39 @@ +import uuid +from datetime import datetime + +from sqlalchemy import ForeignKey, String, UniqueConstraint, func +from sqlalchemy.orm import Mapped, mapped_column + +from ..database.base import Base + + +class Workflow(Base): + __tablename__ = 'workflows' + __table_args__ = ( + UniqueConstraint('name', 'namespace', name='uq_workflows_name_namespace'), + ) + + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, default=uuid.uuid4, index=True + ) + name: Mapped[str] = mapped_column(String(length=255), nullable=False) + namespace: Mapped[str] = mapped_column( + ForeignKey('namespaces.name'), nullable=False, index=True + ) + created_at: Mapped[datetime] = mapped_column(default=func.now()) + updated_at: Mapped[datetime] = mapped_column( + default=func.now(), onupdate=func.now() + ) + + @staticmethod + def get_table_name(): + return (Workflow()).__tablename__ + + def to_dict(self): + return { + 'id': str(self.id), + 'name': self.name, + 'namespace': self.namespace, + 'created_at': self.created_at.isoformat(), + 'updated_at': self.updated_at.isoformat(), + } diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/workflow_pipeline.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/workflow_pipeline.py new file mode 100644 index 00000000..f2e8e377 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/workflow_pipeline.py @@ -0,0 +1,52 @@ +import uuid +from datetime import datetime + +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column +from sqlalchemy.orm import relationship + +from ..database.base import Base + + +class WorkflowPipeline(Base): + __tablename__ = 'workflow_pipeline' + + id: Mapped[uuid.UUID] = mapped_column( + primary_key=True, default=uuid.uuid4, index=True + ) + name: Mapped[str] = mapped_column(nullable=False) + description: Mapped[str] = mapped_column(nullable=True) + workflow_id: Mapped[uuid.UUID] = mapped_column( + ForeignKey('workflows.id'), nullable=False, index=True + ) + retry_policy: Mapped[str] = mapped_column(nullable=True) + timeout: Mapped[int] = mapped_column(nullable=True) + concurrency_limit: Mapped[int] = mapped_column(nullable=True, default=1) + created_at: Mapped[datetime] = mapped_column(default=datetime.now) + updated_at: Mapped[datetime] = mapped_column( + default=datetime.now, onupdate=datetime.now + ) + + # Relationship to Workflow + workflow = relationship('Workflow', foreign_keys=[workflow_id]) + + # Relationship to WorkflowRuns + runs = relationship( + 'WorkflowRuns', + back_populates='workflow_pipeline', + cascade='all, delete', + ) + + def to_dict(self): + return { + 'id': str(self.id), + 'name': self.name, + 'description': self.description, + 'workflow_id': str(self.workflow_id), + 'retry_policy': self.retry_policy, + 'timeout': self.timeout, + 'concurrency_limit': self.concurrency_limit, + 'created_at': self.created_at.isoformat(), + 'updated_at': self.updated_at.isoformat(), + } diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/models/workflow_runs.py b/wavefront/server/modules/db_repo_module/db_repo_module/models/workflow_runs.py new file mode 100644 index 00000000..bb16c09d --- /dev/null +++ b/wavefront/server/modules/db_repo_module/db_repo_module/models/workflow_runs.py @@ -0,0 +1,46 @@ +import uuid +from datetime import datetime + +from sqlalchemy.orm import Mapped +from sqlalchemy.orm import mapped_column +from sqlalchemy.orm import relationship +from sqlalchemy import ForeignKey + +from 
..database.base import Base
+
+
+class WorkflowRuns(Base):
+    __tablename__ = 'workflow_runs'
+
+    id: Mapped[uuid.UUID] = mapped_column(
+        primary_key=True, default=uuid.uuid4, index=True
+    )
+    workflow_pipeline_id: Mapped[uuid.UUID] = mapped_column(
+        ForeignKey('workflow_pipeline.id', ondelete='CASCADE'), nullable=False
+    )
+    status: Mapped[str] = mapped_column(
+        nullable=False
+    )  # initiated, in_progress, completed, failed
+    start_time: Mapped[datetime] = mapped_column(nullable=False)
+    end_time: Mapped[datetime] = mapped_column(nullable=True)
+    error: Mapped[str] = mapped_column(nullable=True)
+    output: Mapped[str] = mapped_column(nullable=True)
+    created_at: Mapped[datetime] = mapped_column(default=datetime.now)
+    updated_at: Mapped[datetime] = mapped_column(
+        default=datetime.now, onupdate=datetime.now
+    )
+
+    workflow_pipeline = relationship('WorkflowPipeline', back_populates='runs')
+
+    def to_dict(self):
+        return {
+            'id': str(self.id),
+            'workflow_pipeline_id': str(self.workflow_pipeline_id),
+            'status': self.status,
+            'start_time': self.start_time.isoformat(),
+            'end_time': self.end_time.isoformat() if self.end_time else None,
+            'error': self.error,
+            'output': self.output,
+            'created_at': self.created_at.isoformat(),
+            'updated_at': self.updated_at.isoformat(),
+        }
diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/repositories/sql_alchemy_repository.py b/wavefront/server/modules/db_repo_module/db_repo_module/repositories/sql_alchemy_repository.py
new file mode 100644
index 00000000..05537f46
--- /dev/null
+++ b/wavefront/server/modules/db_repo_module/db_repo_module/repositories/sql_alchemy_repository.py
@@ -0,0 +1,226 @@
+from typing import Any, Generic, Type, TypeVar
+
+from sqlalchemy import delete
+from sqlalchemy import func
+from sqlalchemy import insert
+from sqlalchemy import select
+from sqlalchemy import update
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.orm import Session
+from sqlalchemy.sql import text
+
+from ..database.base import Base
+from ..database.connection import DatabaseClient
+
+T = TypeVar('T', bound=Base)  # type: ignore
+
+
+class SQLAlchemyRepository(Generic[T]):
+    def __init__(self, model: Type[T], db_client: DatabaseClient):
+        """
+        Initialize the repository with a specific model.
+
+        :param model: The SQLAlchemy model class (a subclass of the declarative Base).
+        :param db_client: Database client providing the async session factory.
+        """
+        self.model: Type[T] = model
+        self.session: Session = db_client.session
+
+    async def create(self, **kwargs) -> T:
+        """
+        Create a new record in the database.
+
+        :param kwargs: The fields and their values to create the record.
+        :return: The created instance of the model.
+        """
+        async with self.session() as session:
+            session: AsyncSession
+            instance = self.model(**kwargs)
+            session.add(instance)
+            await session.commit()
+            await session.refresh(instance)
+
+            return instance
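The repository is generic over the declarative model type. A minimal usage sketch, not part of the patch, assuming a DatabaseClient wired up elsewhere (for example through the DI containers in this change) and the User model defined earlier:

from db_repo_module.models.user import User
from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository


async def example(db_client) -> None:
    repo = SQLAlchemyRepository(User, db_client)
    # create() opens a session, commits, and refreshes the new instance.
    user = await repo.create(
        email='jane@example.com',
        password='<hashed password>',
        first_name='Jane',
        last_name='Doe',
    )
    print(user.to_dict())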
+ """ + model_instances = [] + for data in records: + model_instances.append(data) + + if session: + for instance in model_instances: + await session.merge(instance) if replace else session.add(instance) + return records + else: + async with self.session() as session: + session: AsyncSession + for instance in model_instances: + await session.merge(instance) if replace else session.add(instance) + await session.commit() + return records + + async def find(self, limit: int = 100, **filters) -> list[T]: + """ + Find all records in the database matching the given filters. + + :param filters: The filters to apply to the query. + :return: A list of matching model instances. + """ + if 'session' in filters and isinstance(filters['session'], AsyncSession): + session = filters['session'] + del filters['session'] + query = select(self.model) + for key, value in filters.items(): + if isinstance(value, list): + query = query.where(getattr(self.model, key).in_(value)) + else: + query = query.where(getattr(self.model, key) == value) + query = query.limit(limit) + return (await session.scalars(query)).all() + + async with self.session() as session: + session: AsyncSession + query = select(self.model) + for key, value in filters.items(): + if isinstance(value, list): + query = query.where(getattr(self.model, key).in_(value)) + else: + query = query.where(getattr(self.model, key) == value) + query = query.limit(limit) + return (await session.scalars(query)).all() + + async def find_one(self, **filters) -> T | None: + """ + Find the first record in the database matching the given filters. + + :param filters: The filters to apply to the query. + :return: The first matching model instance, or None if no match is found. + """ + async with self.session() as session: + session: AsyncSession + query = select(self.model) + for key, value in filters.items(): + query = query.where(getattr(self.model, key) == value) + return await session.scalar(query) + + async def find_one_and_update( + self, filters: dict[str, Any], refresh: bool = False, **update_data + ) -> T | None: + """ + Find the first record in the database matching the given filters, and update it with the provided data. + + :param filters: The filters to apply to the query. + :param update_data: The data to update the record with. + :return: The updated model instance, or None if no match is found. + """ + async with self.session() as session: + session: AsyncSession + query = select(self.model) + for key, value in filters.items(): + query = query.where(getattr(self.model, key) == value) + instance = await session.scalar(query) + if instance: + for key, value in update_data.items(): + setattr(instance, key, value) + await session.commit() + if refresh: + await session.refresh( + instance + ) # Refresh to ensure object is properly attached + return instance + else: + return None + + async def delete_all(self, **filters) -> None: + """ + Delete all records in the database matching the given filters. + + :param filters: The filters to apply to the query. + """ + async with self.session() as session: + session: AsyncSession + query = delete(self.model) + for key, value in filters.items(): + query = query.where(getattr(self.model, key) == value) + await session.execute(query) + await session.commit() + return True + + async def check_empty(self) -> bool: + """ + Check if the database table is empty. + + :return: True if the table is empty, False otherwise. 
+ """ + async with self.session() as session: + session: AsyncSession + count = await session.scalar(select(func.count()).select_from(self.model)) + return count == 0 + + async def count(self, **filters) -> int: + """ + retrive all the data from the table + :return the count after applying the filters + """ + async with self.session() as session: + session: AsyncSession + query = select(func.count()).select_from(self.model) + for key, value in filters.items(): + query = query.where(getattr(self.model, key) == value) + return await session.scalar(query) + + async def execute_query(self, query: str, params={}, model_class=None) -> list: + """ + Execute a raw SQL query or an SQLAlchemy query asynchronously and return the results. + + :param query: The SQLAlchemy `select` query or raw SQL string. + :return: A list of matching records. + """ + async with self.session() as session: + session: AsyncSession + result = await session.execute(text(query), params) + columns = result.keys() + rows = [dict(zip(columns, row)) for row in result.all()] + if model_class: + return [model_class(**row) for row in rows] + return rows + + async def upsert(self, filters: dict[str, Any], **update_values): + """ + Find the first record in the database matching the given filters + if the record exists it will update the record with specified filters + otherwise it will create an record with filters and update_values + """ + async with self.session() as session: + session: AsyncSession + query = select(self.model).filter_by(**filters) + result = await session.execute(query) + existing_count = result.scalar_one_or_none() + if existing_count: + stmt = ( + update(self.model) + .where( + *( + getattr(self.model, key) == val + for key, val in filters.items() + ) + ) + .values(**update_values) + ) + await session.execute(stmt) + else: + stmt = insert(self.model).values({**filters, **update_values}) + await session.execute(stmt) + await session.commit() diff --git a/wavefront/server/modules/db_repo_module/pyproject.toml b/wavefront/server/modules/db_repo_module/pyproject.toml new file mode 100644 index 00000000..a1bdcda4 --- /dev/null +++ b/wavefront/server/modules/db_repo_module/pyproject.toml @@ -0,0 +1,32 @@ +[project] +name = "db-repo-module" +version = "0.1.0" +description = "" +authors = [ + { name = "rootflo engineering", email = "engineering@rootflo.ai" } +] +requires-python = ">=3.11" + +dependencies = [ + "common_module", + "dependency-injector>=4.42.0,<5.0.0", + "sqlalchemy>=2.0.36,<3.0.0", + "alembic>=1.14.1,<2.0.0", + "redis>=5.2.1,<6.0.0", + "pgvector>=0.4.1", + "tenacity>=8.1.0,<9.0.0", + "psycopg[binary,pool]>=3.2.3,<4.0.0", +] + +[tool.uv.sources] +common-module = { workspace = true } + +[tool.uv] +package = true + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["db_repo_module"] diff --git a/wavefront/server/modules/gold_module/gold_module/controllers/image_controller.py b/wavefront/server/modules/gold_module/gold_module/controllers/image_controller.py new file mode 100644 index 00000000..512227a9 --- /dev/null +++ b/wavefront/server/modules/gold_module/gold_module/controllers/image_controller.py @@ -0,0 +1,90 @@ +import base64 +import re +import httpx + +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from common_module.log.logger import logger +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi 
import APIRouter +from fastapi import Depends +from fastapi import status +from fastapi.responses import JSONResponse +from gold_module.gold_container import GoldContainer +from gold_module.services.image_service import ImageService +from gold_module.models.gold_image_request import ImageAnalysisRequest + +image_controller = APIRouter() + + +@image_controller.post('/analyse') +@inject +async def process_image( + request: ImageAnalysisRequest, + image_service: ImageService = Depends(Provide[GoldContainer.image_service]), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + image_str = request.image + metadata = request.metadata + extra_fields = metadata.get_extra_fields() + if extra_fields: + logger.info(f'Unnecessary extra fields: {extra_fields}') + + # remove extra not required fields from metadata + filtered_metadata_dict = metadata.get_defined_fields() + + gold_image = None + + # Check for data URL (base64 with MIME) + data_url_pattern = r'^data:(image/\w+);base64,(.+)' + match = re.match(data_url_pattern, image_str) + if match: + try: + gold_image = base64.b64decode(match.group(2)) + except Exception: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Invalid base64 image encoding' + ), + ) + elif image_str.startswith('http://') or image_str.startswith('https://'): + # Download the image from the URL + try: + async with httpx.AsyncClient() as client: + resp = await client.get(image_str) + if resp.status_code != 200: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Failed to download image from URL' + ), + ) + gold_image = resp.content + except Exception: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Error downloading image from URL' + ), + ) + else: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Image must be a data URL or a direct image URL' + ), + ) + if not gold_image: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse('Empty image file'), + ) + result = await image_service.process_image(gold_image, filtered_metadata_dict) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse(result), + ) diff --git a/wavefront/server/modules/gold_module/gold_module/controllers/router.py b/wavefront/server/modules/gold_module/gold_module/controllers/router.py new file mode 100644 index 00000000..1e5cadfe --- /dev/null +++ b/wavefront/server/modules/gold_module/gold_module/controllers/router.py @@ -0,0 +1,5 @@ +from fastapi.routing import APIRouter +from gold_module.controllers.image_controller import image_controller + +gold_router = APIRouter() +gold_router.include_router(image_controller, prefix='/v1/image') diff --git a/wavefront/server/modules/gold_module/gold_module/gold_container.py b/wavefront/server/modules/gold_module/gold_module/gold_container.py new file mode 100644 index 00000000..2f4b737f --- /dev/null +++ b/wavefront/server/modules/gold_module/gold_module/gold_container.py @@ -0,0 +1,36 @@ +import os + +from dependency_injector import containers +from dependency_injector import providers +from gold_module.services.cloud_image_service import AWSImageService +from gold_module.services.cloud_image_service import GCPImageService +from 
diff --git a/wavefront/server/modules/gold_module/gold_module/controllers/router.py b/wavefront/server/modules/gold_module/gold_module/controllers/router.py
new file mode 100644
index 00000000..1e5cadfe
--- /dev/null
+++ b/wavefront/server/modules/gold_module/gold_module/controllers/router.py
@@ -0,0 +1,5 @@
+from fastapi.routing import APIRouter
+from gold_module.controllers.image_controller import image_controller
+
+gold_router = APIRouter()
+gold_router.include_router(image_controller, prefix='/v1/image')
diff --git a/wavefront/server/modules/gold_module/gold_module/gold_container.py b/wavefront/server/modules/gold_module/gold_module/gold_container.py
new file mode 100644
index 00000000..2f4b737f
--- /dev/null
+++ b/wavefront/server/modules/gold_module/gold_module/gold_container.py
@@ -0,0 +1,36 @@
+import os
+
+from dependency_injector import containers
+from dependency_injector import providers
+from gold_module.services.cloud_image_service import AWSImageService
+from gold_module.services.cloud_image_service import GCPImageService
+from gold_module.services.image_service import ImageService
+
+
+class GoldContainer(containers.DeclarativeContainer):
+    config = providers.Configuration(ini_files=['config.ini'])
+
+    cloud_provider = providers.Singleton(
+        lambda: os.environ.get('CLOUD_PROVIDER', 'gcp').lower()
+    )
+
+    aws_image_service = providers.Singleton(
+        AWSImageService,
+        bucket_name=config.aws.aws_asset_storage_bucket,
+        queue_url=config.aws.queue_url,
+        region=config.aws.region,
+    )
+
+    gcp_image_service = providers.Singleton(
+        GCPImageService,
+        bucket_name=config.gcp.gcp_asset_storage_bucket,
+        project_id=config.gcp.gcp_project_id,
+        topic_id=config.gcp.gold_topic_id,
+    )
+
+    # providers.Selector acts as an if/else: 'gcp' selects gcp_image_service, 'aws' selects aws_image_service
+    cloud_service = providers.Selector(
+        cloud_provider, aws=aws_image_service, gcp=gcp_image_service
+    )
+
+    image_service = providers.Singleton(ImageService, cloud_service=cloud_service)
diff --git a/wavefront/server/modules/gold_module/gold_module/models/gold_image_request.py b/wavefront/server/modules/gold_module/gold_module/models/gold_image_request.py
new file mode 100644
index 00000000..ef8d8305
--- /dev/null
+++ b/wavefront/server/modules/gold_module/gold_module/models/gold_image_request.py
@@ -0,0 +1,90 @@
+from datetime import datetime
+from typing import List
+from pydantic import BaseModel, ConfigDict
+
+
+class Item(BaseModel):
+    item_id: str = None
+    item_type: str = None
+    item_count: int = None
+    item_description: str = None
+    item_gross_weight: float = None
+    item_stone_weight: float = None
+    item_purity: float = None
+    model_config = ConfigDict(extra='ignore')
+
+
+class ImageMetadata(BaseModel):
+    customer_id: str = None
+    loan_id: str = None
+
+    branch: str = None
+    city: str = None
+    region: str = None
+    zone: str = None
+    category: str = None
+
+    agent_id: str = None
+    item_id: str = None  # Unique identifier for the gold image
+
+    timestamp: datetime = None
+    loan_date: datetime = None
+    gold_loan_category: str = None
+    loan_tenure: int = None
+    loan_amount: float = None
+
+    gross_weight: float = None
+    stone_weight: float = None
+    net_weight: float = None
+    jewellery_items_count: int = None
+    gold_purity: float = None
+
+    items: List[Item] = None
+
+    metadata_1: str = None
+    metadata_2: str = None
+    metadata_3: str = None
+    metadata_4: str = None
+    metadata_5: str = None
+
+    filter_1: str = None
+    filter_2: str = None
+    filter_3: str = None
+    filter_4: str = None
+    filter_5: str = None
+
+    model_config = ConfigDict(extra='allow')
+
+    def get_extra_fields(self) -> dict:
+        """Return a dict of extra fields not defined in the model."""
+        return (
+            dict(self.__pydantic_extra__)
+            if hasattr(self, '__pydantic_extra__') and self.__pydantic_extra__
+            else {}
+        )
+
+    def get_defined_fields(self) -> dict:
+        """Return a dict of only the defined fields (excluding extras)."""
+        return self.model_dump(exclude=self.get_extra_fields().keys())
+
+    def to_string_dict(self) -> dict:
+        """Return a dict with all fields (excluding extras) as strings. None remains None.
All nested values are strings.""" + + def to_str_recursive(val): + if val is None: + return None + if isinstance(val, list): + return [to_str_recursive(v) for v in val] + if isinstance(val, dict): + return {k: to_str_recursive(v) for k, v in val.items()} + return str(val) + + all_fields = {**self.get_defined_fields()} + return {k: to_str_recursive(v) for k, v in all_fields.items()} + + +class ImageAnalysisRequest(BaseModel): + image: str # data URL (base64 with MIME) or direct URL + metadata: ImageMetadata = ( + ImageMetadata() + ) # Ensure metadata is always an ImageMetadata instance diff --git a/wavefront/server/modules/gold_module/gold_module/services/cloud_image_service.py b/wavefront/server/modules/gold_module/gold_module/services/cloud_image_service.py new file mode 100644 index 00000000..404f6d00 --- /dev/null +++ b/wavefront/server/modules/gold_module/gold_module/services/cloud_image_service.py @@ -0,0 +1,126 @@ +from abc import ABC +from abc import abstractmethod +import json +from typing import Any, Dict, Tuple + +import boto3 +from common_module.log.logger import logger +from google.cloud import pubsub_v1 +from google.cloud import storage + + +class CloudImageService(ABC): + @abstractmethod + async def upload_image(self, image_data: bytes, object_key: str) -> Tuple[str, str]: + pass + + @abstractmethod + async def send_message(self, message: Dict[str, Any]) -> str: + pass + + @abstractmethod + async def upload_image_metadata( + self, image_metadata: bytes, object_key: str + ) -> Tuple[str, str]: + """Upload image metadata to the cloud storage""" + pass + + +class AWSImageService(CloudImageService): + def __init__(self, bucket_name: str, queue_url: str, region: str = 'us-east-1'): + self.bucket_name = bucket_name + self.queue_url = queue_url + self.region = region + + if not self.bucket_name: + raise ValueError('S3 bucket name must be provided for AWS') + if not self.queue_url: + raise ValueError('SQS queue URL must be provided for AWS') + + self.s3_client = boto3.client('s3', region_name=region) + self.sqs_client = boto3.client('sqs', region_name=region) + + async def upload_image(self, image_data: bytes, object_key: str) -> Tuple[str, str]: + """Upload to AWS S3""" + self.s3_client.put_object( + Bucket=self.bucket_name, + Key=object_key, + Body=image_data, + ContentType='image/jpeg', + ) + + return (self.bucket_name, object_key) + + async def send_message(self, message: Dict[str, Any]) -> str: + """Send to AWS SQS""" + response = self.sqs_client.send_message( + QueueUrl=self.queue_url, MessageBody=json.dumps(message) + ) + + message_id = response['MessageId'] + logger.info(f'Successfully sent message to SQS: {message_id}') + return message_id + + async def upload_image_metadata( + self, image_metadata: bytes, object_key: str + ) -> Tuple[str, str]: + """Upload image metadata to AWS S3""" + self.s3_client.put_object( + Bucket=self.bucket_name, + Key=object_key, + Body=image_metadata, + ContentType='application/json', + ) + + return (self.bucket_name, object_key) + + +class GCPImageService(CloudImageService): + def __init__(self, bucket_name: str, project_id: str, topic_id: str): + """ + Args: + bucket_name: Name of the GCS bucket + project_id: GCP project ID + topic_id: Pub/Sub topic ID + """ + self.bucket_name = bucket_name + self.project_id = project_id + self.topic_id = topic_id + + if not self.bucket_name: + raise ValueError('GCS bucket name must be provided for GCP') + if not self.project_id: + raise ValueError('Project ID must be provided for GCP') + if not self.topic_id: 
+            raise ValueError('Topic ID must be provided for GCP')
+
+        self.storage_client = storage.Client()
+        self.publisher = pubsub_v1.PublisherClient()
+        self.topic_path = self.publisher.topic_path(self.project_id, self.topic_id)
+
+    async def upload_image(self, image_data: bytes, object_key: str) -> Tuple[str, str]:
+        """Upload to Google Cloud Storage"""
+        bucket = self.storage_client.bucket(self.bucket_name)
+        blob = bucket.blob(object_key)
+        blob.upload_from_string(image_data, content_type='image/jpeg')
+
+        return (self.bucket_name, object_key)
+
+    async def upload_image_metadata(
+        self, image_metadata: str, object_key: str
+    ) -> Tuple[str, str]:
+        """Upload image metadata to GCS"""
+        bucket = self.storage_client.bucket(self.bucket_name)
+        blob = bucket.blob(object_key)
+        blob.upload_from_string(image_metadata)
+
+        return (self.bucket_name, object_key)
+
+    async def send_message(self, message: Dict[str, Any]) -> str:
+        """Send to GCP Pub/Sub"""
+        data = json.dumps(message).encode('utf-8')
+        future = self.publisher.publish(self.topic_path, data)
+        message_id = future.result()
+
+        logger.info(f'Successfully sent message to Pub/Sub: {message_id}')
+        return message_id
diff --git a/wavefront/server/modules/gold_module/gold_module/services/image_service.py b/wavefront/server/modules/gold_module/gold_module/services/image_service.py
new file mode 100644
index 00000000..4e6c80ef
--- /dev/null
+++ b/wavefront/server/modules/gold_module/gold_module/services/image_service.py
@@ -0,0 +1,81 @@
+import io
+from typing import Any, Dict
+import uuid
+from datetime import datetime
+import json
+
+from common_module.log.logger import logger
+from gold_module.services.cloud_image_service import CloudImageService
+from PIL import Image
+
+
+class ImageService:
+    def __init__(self, cloud_service: CloudImageService):
+        self.cloud_service = cloud_service
+
+    async def process_image(
+        self, image_data: bytes, metadata: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        try:
+            validated_image_data = await self._validate_image(image_data)
+            object_key = metadata.get('item_id')
+            if object_key is None or object_key == '':
+                object_key = str(uuid.uuid4())
+                metadata['item_id'] = object_key
+
+            bucket_name, file_path = await self.cloud_service.upload_image(
+                validated_image_data, object_key
+            )
+
+            message = {
+                'parse_type': 'gold',
+                'bucket_name': bucket_name,
+                'key': file_path,
+                'metadata': self._custom_serializer(metadata),
+            }
+
+            await self.cloud_service.upload_image_metadata(
+                image_metadata=json.dumps(message),
+                object_key=f'gold_image_metadata/{object_key}.json',
+            )
+
+            message_id = await self.cloud_service.send_message(message)
+
+            return {
+                'status': 'success',
+                'message_id': message_id,
+            }
+
+        except Exception as e:
+            logger.error(f'Error processing image: {str(e)}')
+            raise Exception(f'Failed to process image: {str(e)}') from e
+
+    async def _validate_image(self, image_data: bytes) -> bytes:
+        try:
+            with Image.open(io.BytesIO(image_data)) as img:
+                # Ensure the image is in RGB format
+                if img.mode != 'RGB':
+                    img = img.convert('RGB')
+
+                buffer = io.BytesIO()
+                img_format = img.format if img.format else 'JPEG'
+                img.save(buffer, format=img_format, quality=85)
+                return buffer.getvalue()
+
+        except Exception as e:
+            logger.error(f'Error validating image: {str(e)}')
+            raise ValueError(f'Invalid image data: {str(e)}') from e
+
+    def _custom_serializer(self, obj):
+        """Helper method for JSON serialization"""
+        if obj is None:
+            return None
+        if isinstance(obj, datetime):
+            return obj.isoformat()
+        if isinstance(obj, dict):
return {k: self._custom_serializer(v) for k, v in obj.items()} + if isinstance(obj, list): + return [self._custom_serializer(item) for item in obj] + if hasattr(obj, 'to_dict'): + return obj.to_dict() + return str(obj) diff --git a/wavefront/server/modules/gold_module/pyproject.toml b/wavefront/server/modules/gold_module/pyproject.toml new file mode 100644 index 00000000..de680ef4 --- /dev/null +++ b/wavefront/server/modules/gold_module/pyproject.toml @@ -0,0 +1,29 @@ +[project] +name = "gold-module" +version = "0.1.0" +description = "" +authors = [ + { name = "rootflo engineering", email = "engineering@rootflo.ai" } +] +requires-python = ">=3.11" + +dependencies = [ + "common-module", + "dependency-injector>=4.42.0,<5.0.0", + "fastapi>=0.115.2,<1.0.0", + "google-cloud-storage<3.0.0", + "google-cloud-pubsub>=2.29.0,<3.0.0", +] + +[tool.uv.sources] +common-module = { workspace = true } + +[tool.uv] +package = true + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["gold_module"] diff --git a/wavefront/server/modules/image_search_module/image_search_module/algorithms/base.py b/wavefront/server/modules/image_search_module/image_search_module/algorithms/base.py new file mode 100644 index 00000000..ab92a93f --- /dev/null +++ b/wavefront/server/modules/image_search_module/image_search_module/algorithms/base.py @@ -0,0 +1,111 @@ +from abc import ABC, abstractmethod +from typing import Dict, List, Any +from dataclasses import dataclass +from enum import Enum + + +class AlgorithmType(Enum): + """Supported matching algorithms""" + + SIFT = 'sift' + # SAM_DINOV2 = "sam_dinov2" + + +@dataclass +class MatchResult: + """Standardized match result across all algorithms""" + + algorithm_type: str + reference_id: str + match_score: float + is_match: bool + confidence: float + processing_time_ms: float + metadata: Dict[str, Any] + + def to_dict(self) -> Dict[str, Any]: + return { + 'algorithm_type': self.algorithm_type, + 'reference_id': self.reference_id, + 'match_score': self.match_score, + 'is_match': self.is_match, + 'confidence': self.confidence, + 'processing_time_ms': self.processing_time_ms, + 'metadata': self.metadata, + } + + +@dataclass +class AlgorithmInfo: + """Algorithm metadata and capabilities""" + + name: str + version: str + description: str + supported_formats: List[str] + performance_characteristics: Dict[str, Any] + requirements: Dict[str, Any] + + +class ImageMatchingAlgorithm(ABC): + """Abstract base class for all image matching algorithms""" + + def __init__(self, config: Dict[str, Any]): + self.config = config + self.algorithm_type = self.__class__.__name__.lower().replace('matcher', '') + + @abstractmethod + def extract_features(self, image_bytes: bytes) -> Any: + """ + Extract features from image bytes + + Args: + image_bytes: Raw image data + + Returns: + Algorithm-specific feature representation + """ + pass + + @abstractmethod + def match_against_reference( + self, query_features: Any, reference_features: Any, reference_id: str + ) -> MatchResult: + """ + Match query features against single reference + + Args: + query_features: Features extracted from query image + reference_features: Features from reference image + reference_id: Unique identifier for reference + + Returns: + MatchResult with similarity score and metadata + """ + pass + + @abstractmethod + def batch_match( + self, query_features: Any, reference_features_map: Dict[str, Any] + ) -> List[MatchResult]: + """ + Efficiently match query 
against multiple references + + Args: + query_features: Features from query image + reference_features_map: Dict of {reference_id: features} + + Returns: + List of MatchResult objects + """ + pass + + @abstractmethod + def get_algorithm_info(self) -> AlgorithmInfo: + """Return algorithm metadata and capabilities""" + pass + + def preprocess_image(self, image_bytes: bytes, target_width: int = 800) -> Any: + """Common image preprocessing logic""" + # This would contain shared preprocessing logic + pass diff --git a/wavefront/server/modules/image_search_module/image_search_module/algorithms/sift_matcher.py b/wavefront/server/modules/image_search_module/image_search_module/algorithms/sift_matcher.py new file mode 100644 index 00000000..fea66a7c --- /dev/null +++ b/wavefront/server/modules/image_search_module/image_search_module/algorithms/sift_matcher.py @@ -0,0 +1,278 @@ +import cv2 +import numpy as np +import time +from typing import Dict, List, Any +from dataclasses import dataclass + +from image_search_module.algorithms.base import ( + ImageMatchingAlgorithm, + MatchResult, + AlgorithmInfo, +) + + +@dataclass +class SIFTFeatures: + """SIFT-specific feature representation""" + + keypoints: List[cv2.KeyPoint] + descriptors: np.ndarray + image_shape: tuple + + def to_dict(self) -> Dict[str, Any]: + """Serialize for storage""" + return { + 'keypoints': [ + { + 'pt': kp.pt, + 'size': kp.size, + 'angle': kp.angle, + 'response': kp.response, + 'octave': kp.octave, + 'class_id': kp.class_id, + } + for kp in self.keypoints + ], + 'descriptors': self.descriptors.tolist() + if self.descriptors is not None + else None, + 'image_shape': self.image_shape, + } + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> 'SIFTFeatures': + """Deserialize from storage""" + keypoints = [] + for kp_data in data['keypoints']: + kp = cv2.KeyPoint( + x=kp_data['pt'][0], + y=kp_data['pt'][1], + size=kp_data['size'], + angle=kp_data['angle'], + response=kp_data['response'], + octave=kp_data['octave'], + class_id=kp_data['class_id'], + ) + keypoints.append(kp) + + descriptors = ( + np.array(data['descriptors'], dtype=np.float32) + if data['descriptors'] + else None + ) + return cls( + keypoints=keypoints, + descriptors=descriptors, + image_shape=data['image_shape'], + ) + + +class SIFTMatcher(ImageMatchingAlgorithm): + """SIFT-based image matching implementation""" + + def __init__(self, config: Dict[str, Any]): + super().__init__(config) + self.max_features = config.get('max_features', 5000) + self.lowe_ratio = config.get('lowe_ratio', 0.75) + self.match_threshold = config.get('match_threshold', 10) + self.min_homography_matches = config.get('min_homography_matches', 4) + self.target_width = config.get('target_width', 800) + + self.sift = cv2.SIFT_create(nfeatures=self.max_features) + + def extract_features(self, image_bytes: bytes) -> SIFTFeatures: + """Extract SIFT features from image""" + try: + # Convert bytes to image + nparr = np.frombuffer(image_bytes, np.uint8) + image = cv2.imdecode(nparr, cv2.IMREAD_GRAYSCALE) + + if image is None: + raise ValueError('Could not decode image') + + # Preprocess image + processed_image = self._preprocess_image(image) + + # Extract SIFT features + keypoints, descriptors = self.sift.detectAndCompute(processed_image, None) + + return SIFTFeatures( + keypoints=keypoints, + descriptors=descriptors, + image_shape=processed_image.shape, + ) + + except Exception as e: + raise RuntimeError(f'SIFT feature extraction failed: {e}') + + def match_against_reference( + self, + 
query_features: SIFTFeatures, + reference_features: SIFTFeatures, + reference_id: str, + ) -> MatchResult: + """Match SIFT features against single reference""" + + start_time = time.perf_counter() + + try: + # Perform feature matching + matches = self._match_features( + query_features.descriptors, reference_features.descriptors + ) + + # Verify with homography if enough matches + inlier_matches, homography, is_valid = self._verify_homography( + query_features.keypoints, reference_features.keypoints, matches + ) + + match_score = len(inlier_matches) + is_match = match_score >= self.match_threshold and is_valid + confidence = min(match_score / (self.match_threshold * 2), 1.0) + + end_time = time.perf_counter() + processing_time_ms = (end_time - start_time) * 1000 + + return MatchResult( + algorithm_type=self.algorithm_type, + reference_id=reference_id, + match_score=match_score, + is_match=is_match, + confidence=confidence, + processing_time_ms=processing_time_ms, + metadata={ + 'total_matches': len(matches), + 'inlier_matches': len(inlier_matches), + 'homography_valid': is_valid, + 'lowe_ratio': self.lowe_ratio, + }, + ) + + except Exception as e: + return MatchResult( + algorithm_type=self.algorithm_type, + reference_id=reference_id, + match_score=0.0, + is_match=False, + confidence=0.0, + processing_time_ms=0.0, + metadata={'error': str(e)}, + ) + + def batch_match( + self, + query_features: SIFTFeatures, + reference_features_map: Dict[str, SIFTFeatures], + ) -> List[MatchResult]: + """Batch match against multiple references""" + + results = [] + for ref_id, ref_features in reference_features_map.items(): + result = self.match_against_reference(query_features, ref_features, ref_id) + results.append(result) + + return results + + def get_algorithm_info(self) -> AlgorithmInfo: + """Return SIFT algorithm information""" + return AlgorithmInfo( + name='SIFT', + version='1.0.0', + description='Scale-Invariant Feature Transform for feature-based matching', + supported_formats=['jpg', 'jpeg', 'png', 'bmp', 'tiff'], + performance_characteristics={ + 'rotation_invariant': True, + 'scale_invariant': True, + 'illumination_robust': True, + 'typical_processing_time_ms': '100-500', + 'memory_usage': 'moderate', + }, + requirements={ + 'opencv': '>=4.8.0', + 'min_image_size': '100x100', + 'recommended_image_size': '800x600', + }, + ) + + def _preprocess_image(self, image: np.ndarray) -> np.ndarray: + """Preprocess image for SIFT""" + # Resize if too large + if image.shape[1] > self.target_width: + scale = self.target_width / image.shape[1] + new_height = int(image.shape[0] * scale) + image = cv2.resize( + image, (self.target_width, new_height), interpolation=cv2.INTER_AREA + ) + + # Apply CLAHE for contrast enhancement + clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8)) + image = clahe.apply(image) + + # Apply slight Gaussian blur + image = cv2.GaussianBlur(image, (3, 3), 0.5) + + return image + + def _match_features(self, desc1: np.ndarray, desc2: np.ndarray) -> List[cv2.DMatch]: + """Match SIFT descriptors using Lowe's ratio test""" + if desc1 is None or desc2 is None or len(desc1) < 2 or len(desc2) < 2: + return [] + + try: + bf = cv2.BFMatcher() + matches = bf.knnMatch(desc1, desc2, k=2) + + # Apply Lowe's ratio test + good_matches = [] + for match_pair in matches: + if len(match_pair) == 2: + m, n = match_pair + if m.distance < self.lowe_ratio * n.distance: + good_matches.append(m) + + return good_matches + + except Exception: + return [] + + def _verify_homography( + self, + kp1: 
List[cv2.KeyPoint], + kp2: List[cv2.KeyPoint], + matches: List[cv2.DMatch], + ) -> tuple: + """Verify matches using homography estimation""" + if len(matches) < self.min_homography_matches: + return matches, None, False + + try: + # Extract matched points + src_pts = np.float32([kp1[m.queryIdx].pt for m in matches]).reshape( + -1, 1, 2 + ) + dst_pts = np.float32([kp2[m.trainIdx].pt for m in matches]).reshape( + -1, 1, 2 + ) + + # Find homography + homography, mask = cv2.findHomography( + src_pts, dst_pts, cv2.RANSAC, 5.0, maxIters=5000, confidence=0.995 + ) + + if homography is not None: + # Filter inlier matches + inlier_matches = [matches[i] for i in range(len(matches)) if mask[i]] + + # Check homography quality + det = np.linalg.det(homography[:2, :2]) + is_valid = ( + 0.1 < abs(det) < 10 + and len(inlier_matches) >= self.min_homography_matches + ) + + return inlier_matches, homography, is_valid + + return matches, None, False + + except Exception: + return matches, None, False diff --git a/wavefront/server/modules/image_search_module/image_search_module/controllers/image_search_controller.py b/wavefront/server/modules/image_search_module/image_search_module/controllers/image_search_controller.py new file mode 100644 index 00000000..da9d35a7 --- /dev/null +++ b/wavefront/server/modules/image_search_module/image_search_module/controllers/image_search_controller.py @@ -0,0 +1,159 @@ +from typing import List, Optional +from fastapi import APIRouter, Depends, Query +from fastapi.responses import JSONResponse +from fastapi import status + +from common_module.response_formatter import ResponseFormatter + +from common_module.common_container import CommonContainer +from dependency_injector.wiring import inject, Provide + +from image_search_module.image_search_container import ImageSearchContainer +from image_search_module.services.ikb_service import IKBService +from image_search_module.models.ikb_models import ( + CreateIKBRequest, + IKBInfo, + IKBType, + IKBImageAddRequest, + IKBSearchRequest, + IKBSearchResponse, +) + +image_search_router = APIRouter(prefix='/ikb') + + +# IKB Management Endpoints +@image_search_router.post('/create', response_model=IKBInfo) +@inject +async def create_ikb( + payload: CreateIKBRequest, + ikb_service: IKBService = Depends(Provide[ImageSearchContainer.ikb_service]), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """Create a new Image Knowledge Base""" + + ikb_info = await ikb_service.create_ikb(payload) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + ikb_info.model_dump(mode='json') + ), + ) + + +@image_search_router.get('/', response_model=List[IKBInfo]) +@inject +async def list_ikbs( + ikb_type: Optional[IKBType] = Query(None, description='Filter by IKB type'), + ikb_service: IKBService = Depends(Provide[ImageSearchContainer.ikb_service]), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """List all Image Knowledge Bases""" + + ikbs = await ikb_service.list_ikbs(ikb_type=ikb_type) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'ikbs': [ikb.model_dump(mode='json') for ikb in ikbs]} + ), + ) + + +@image_search_router.get('/{ikb_id}', response_model=IKBInfo) +@inject +async def get_ikb( + ikb_id: str, + ikb_service: IKBService = Depends(Provide[ImageSearchContainer.ikb_service]), + response_formatter: 
ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+):
+    """Get information about a specific IKB"""
+    ikb = await ikb_service.get_ikb(ikb_id)
+
+    if not ikb:
+        return JSONResponse(
+            status_code=status.HTTP_404_NOT_FOUND,
+            content=response_formatter.buildErrorResponse(
+                f'IKB with ID {ikb_id} not found'
+            ),
+        )
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(ikb.model_dump(mode='json')),
+    )
+
+
+# Image add and Search Endpoints
+@image_search_router.post('/{ikb_id}/add')
+@inject
+async def add_image_to_ikb(
+    ikb_id: str,
+    payload: IKBImageAddRequest,
+    ikb_service: IKBService = Depends(Provide[ImageSearchContainer.ikb_service]),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+):
+    """Add an image to a specific IKB"""
+    result = await ikb_service.add_image_to_ikb(ikb_id, payload)
+
+    return JSONResponse(
+        status_code=status.HTTP_201_CREATED,
+        content=response_formatter.buildSuccessResponse(result),
+    )
+
+
+@image_search_router.post('/{ikb_id}/search', response_model=IKBSearchResponse)
+@inject
+async def search_in_ikb(
+    ikb_id: str,
+    payload: IKBSearchRequest,
+    ikb_service: IKBService = Depends(Provide[ImageSearchContainer.ikb_service]),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+):
+    """Search for similar images within a specific IKB"""
+    result = await ikb_service.search_in_ikb(ikb_id, payload)
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(result.model_dump(mode='json')),
+    )
+
+
+@image_search_router.delete('/{ikb_id}')
+@inject
+async def delete_ikb(
+    ikb_id: str,
+    ikb_service: IKBService = Depends(Provide[ImageSearchContainer.ikb_service]),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+):
+    """Delete an IKB"""
+
+    success = await ikb_service.delete_ikb(ikb_id)
+
+    if not success:
+        return JSONResponse(
+            status_code=status.HTTP_404_NOT_FOUND,
+            content=response_formatter.buildErrorResponse(
+                f'IKB with ID {ikb_id} not found'
+            ),
+        )
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {'message': 'IKB deleted successfully'}
+        ),
+    )
diff --git a/wavefront/server/modules/image_search_module/image_search_module/image_search_container.py b/wavefront/server/modules/image_search_module/image_search_module/image_search_container.py
new file mode 100644
index 00000000..1a9988e5
--- /dev/null
+++ b/wavefront/server/modules/image_search_module/image_search_module/image_search_container.py
@@ -0,0 +1,96 @@
+from dependency_injector import containers, providers
+from image_search_module.services.image_matching_service import ImageMatchingService
+from image_search_module.services.reference_image_service import ReferenceImageService
+from image_search_module.services.algorithm_factory import AlgorithmFactory
+from image_search_module.services.algorithm_service import AlgorithmService
+from image_search_module.services.ikb_service import IKBService
+from image_search_module.algorithms.base import AlgorithmType
+from image_search_module.repositories.sift_features_repository import (
+    SIFTFeaturesRepository,
+)
+from db_repo_module.models.image_search_models import (
+    ReferenceImageFeatures,
+    SIFTFeatures,
+)
+from db_repo_module.models.ikb_models import ImageKnowledgeBase
+from
db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from image_search_module.repositories.ikb_repository import IKBRepository +import os +import yaml + + +class ImageSearchContainer(containers.DeclarativeContainer): + """Dependency injection container for image search module""" + + _container_dir = os.path.dirname(os.path.abspath(__file__)) + _config_path = os.path.join(_container_dir, 'config', 'algorithm_configs.yaml') + + with open(_config_path, 'r') as file: + config = yaml.safe_load(file) + + cloud_configs = providers.Configuration(ini_files=['config.ini']) + + cloud_storage_manager = providers.Dependency() + + db_client = providers.Dependency() + + active_algorithm_type = providers.Factory( + AlgorithmType, config['service']['active_algorithm'] + ) + + reference_features_repository = providers.Singleton( + SQLAlchemyRepository[ReferenceImageFeatures], + model=ReferenceImageFeatures, + db_client=db_client, + ) + + ikb_repository_db = providers.Singleton( + SQLAlchemyRepository[ImageKnowledgeBase], + model=ImageKnowledgeBase, + db_client=db_client, + ) + + ikb_repository = providers.Singleton( + IKBRepository, + db_repository=ikb_repository_db, + ) + + sift_features_repository = providers.Singleton( + SIFTFeaturesRepository, + model=SIFTFeatures, + db_client=db_client, + ) + + # Core services + algorithm_factory = providers.Singleton(AlgorithmFactory) + + algorithm_service = providers.Singleton( + AlgorithmService, algorithm_factory=algorithm_factory + ) + + reference_image_service = providers.Singleton( + ReferenceImageService, + features_repository=reference_features_repository, + sift_features_repository=sift_features_repository, + algorithm_service=algorithm_service, + cloud_storage_manager=cloud_storage_manager, + bucket_name=cloud_configs.image_search.reference_images_bucket, + ) + + # Main image matching service + image_matching_service = providers.Singleton( + ImageMatchingService, + algorithm_factory=algorithm_factory, + reference_service=reference_image_service, + active_algorithm_type=active_algorithm_type, + algorithm_config=config['algorithms'], + max_results=config['service']['max_results'], + ) + + # IKB service + ikb_service = providers.Singleton( + IKBService, + image_matching_service=image_matching_service, + reference_image_service=reference_image_service, + ikb_repository=ikb_repository, + ) diff --git a/wavefront/server/modules/image_search_module/image_search_module/models/ikb_models.py b/wavefront/server/modules/image_search_module/image_search_module/models/ikb_models.py new file mode 100644 index 00000000..45a540e2 --- /dev/null +++ b/wavefront/server/modules/image_search_module/image_search_module/models/ikb_models.py @@ -0,0 +1,126 @@ +from typing import List, Optional, Dict, Any +from pydantic import BaseModel, Field, field_validator +from datetime import datetime +from enum import Enum + +from image_search_module.algorithms.base import AlgorithmType + + +class IKBStatus(str, Enum): + """Status of an Image Knowledge Base""" + + ACTIVE = 'active' + INACTIVE = 'inactive' + + +class IKBType(str, Enum): + """Types of Image Knowledge Bases""" + + GOLD_MATCHING = 'gold_matching' + PHOTO_MATCHING = 'photo_matching' + + +class CreateIKBRequest(BaseModel): + """Request to create a new Image Knowledge Base""" + + name: str = Field(..., description='Name of the IKB', min_length=1, max_length=100) + description: Optional[str] = Field( + None, description='Description of the IKB', max_length=500 + ) + ikb_type: IKBType = Field(..., 
description='Type of the IKB')
+    algorithm_type: AlgorithmType = Field(
+        ..., description='Algorithm to use for this IKB'
+    )
+    config: Dict[str, Any] = Field(
+        default_factory=dict, description='Algorithm-specific configuration (optional)'
+    )
+
+    @field_validator('name')
+    @classmethod
+    def validate_name(cls, v):
+        if not v.strip():
+            raise ValueError('Name cannot be empty')
+        return v.strip()
+
+
+class IKBInfo(BaseModel):
+    """Information about an Image Knowledge Base"""
+
+    ikb_id: str = Field(..., description='Unique identifier for the IKB')
+    name: str = Field(..., description='Name of the IKB')
+    description: Optional[str] = Field(None, description='Description of the IKB')
+    ikb_type: IKBType = Field(..., description='Type of the IKB')
+    algorithm_type: AlgorithmType = Field(..., description='Algorithm used by this IKB')
+    status: IKBStatus = Field(..., description='Current status of the IKB')
+    image_count: int = Field(0, description='Number of images in this IKB')
+    created_at: datetime = Field(..., description='When the IKB was created')
+    updated_at: datetime = Field(..., description='When the IKB was last updated')
+    config: Dict[str, Any] = Field(
+        default_factory=dict, description='Algorithm-specific configuration'
+    )
+
+
+class IKBImageAddRequest(BaseModel):
+    """Request to add an image to a specific IKB"""
+
+    image_data: str = Field(..., description='Base64 encoded image data URL')
+    reference_id: Optional[str] = Field(
+        None, description='Custom reference ID (auto-generated if not provided)'
+    )
+    metadata: Optional[Dict[str, Any]] = Field(
+        default_factory=dict, description='Additional metadata for the image'
+    )
+
+    @field_validator('image_data')
+    @classmethod
+    def validate_image_data(cls, v):
+        """Validate that image_data is a proper base64 data URL"""
+        if not v.startswith('data:image/'):
+            raise ValueError(
+                'Image data must be a base64 data URL (data:image/...;base64,...)'
+            )
+
+        if ';base64,' not in v:
+            raise ValueError('Image data must be base64 encoded')
+
+        return v
+
+
+class IKBSearchRequest(BaseModel):
+    """Request to search within a specific IKB"""
+
+    image_data: str = Field(..., description='Base64 encoded image data URL')
+    max_results: int = Field(
+        10, description='Maximum number of results to return', ge=1, le=100
+    )
+    threshold: Optional[float] = Field(None, description='Minimum similarity threshold')
+
+    @field_validator('image_data')
+    @classmethod
+    def validate_image_data(cls, v):
+        """Validate that image_data is a proper base64 data URL"""
+        if not v.startswith('data:image/'):
+            raise ValueError(
+                'Image data must be a base64 data URL (data:image/...;base64,...)'
+            )
+
+        if ';base64,' not in v:
+            raise ValueError('Image data must be base64 encoded')
+
+        return v
+
+
+class IKBSearchResponse(BaseModel):
+    """Response from IKB search"""
+
+    query_id: str = Field(..., description='Unique identifier for this search query')
+    ikb_id: str = Field(..., description='ID of the IKB that was searched')
+    ikb_name: str = Field(..., description='Name of the IKB that was searched')
+    algorithm_used: str = Field(..., description='Algorithm used for matching')
+    matches: List[Dict[str, Any]] = Field(..., description='List of matching results')
+    total_images_searched: int = Field(
+        ..., description='Total number of images in the IKB'
+    )
+    processing_time_ms: float = Field(
+        ..., description='Total processing time in milliseconds'
+    )
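For reference, hedged example payloads for the request models above; all IDs and values are placeholders, and the truncated base64 body must be a complete data URL in practice. The config keys mirror SIFTMatcher's configuration further below:

    POST .../ikb/create
    {
      "name": "gold-references",
      "description": "Reference jewellery shots",
      "ikb_type": "gold_matching",
      "algorithm_type": "sift",
      "config": {"match_threshold": 10, "lowe_ratio": 0.75}
    }

    POST .../ikb/{ikb_id}/add
    {
      "image_data": "data:image/jpeg;base64,...",
      "reference_id": "ref-001",
      "metadata": {"branch": "HQ"}
    }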
diff --git a/wavefront/server/modules/image_search_module/image_search_module/models/search_request.py b/wavefront/server/modules/image_search_module/image_search_module/models/search_request.py
new file mode 100644
index 00000000..72fb9a52
--- /dev/null
+++ b/wavefront/server/modules/image_search_module/image_search_module/models/search_request.py
@@ -0,0 +1,86 @@
+from typing import List, Optional, Dict, Any
+from pydantic import BaseModel, Field, field_validator
+import base64
+import binascii
+import re
+
+
+class ImageSearchRequest(BaseModel):
+    """Request model for image search with base64 data URL"""
+
+    image_data: str = Field(
+        ..., description='Base64 encoded image data URL (data:image/...;base64,...)'
+    )
+    algorithm_type: Optional[str] = Field(
+        None, description='Algorithm type to use (sift, sam_dinov2, custom_model)'
+    )
+
+    @field_validator('image_data')
+    @classmethod
+    def validate_image_data(cls, v):
+        """Validate that image_data is a proper base64 data URL"""
+        if not v.startswith('data:image/'):
+            raise ValueError(
+                'Image data must be a base64 data URL (data:image/...;base64,...)'
+            )
+
+        if ';base64,' not in v:
+            raise ValueError('Image data must be base64 encoded')
+
+        # Extract and validate base64 data
+        try:
+            data_url_pattern = r'^data:(image/\w+);base64,(.+)'
+            match = re.match(data_url_pattern, v)
+            if not match:
+                raise ValueError('Invalid data URL format')
+
+            # Decode to check size and validity
+            base64_data = match.group(2)
+            image_bytes = base64.b64decode(base64_data)
+
+            # Check decoded size limit (20MB of image data is ~26.6MB as base64)
+            MAX_SIZE = 20 * 1024 * 1024  # 20MB
+            if len(image_bytes) > MAX_SIZE:
+                raise ValueError(
+                    f'Image too large. Maximum size: {MAX_SIZE // (1024 * 1024)}MB'
+                )
+
+            return v
+
+        except binascii.Error:
+            raise ValueError('Invalid base64 encoding')
+        except Exception as e:
+            raise ValueError(f'Invalid image data: {str(e)}')
+
+    @field_validator('algorithm_type')
+    @classmethod
+    def validate_algorithm_type(cls, v):
+        """Validate algorithm type if provided"""
+        if v is not None:
+            valid_types = ['sift']
+            if v not in valid_types:
+                raise ValueError(
+                    f'Invalid algorithm type. Must be one of: {valid_types}'
+                )
+        return v
+
+
+class MatchResult(BaseModel):
+    """Individual match result"""
+
+    algorithm_type: str
+    reference_id: str
+    match_score: float
+    is_match: bool
+    confidence: float
+    processing_time_ms: float
+    metadata: Dict[str, Any]
+
+
+class ImageSearchResponse(BaseModel):
+    """Response model for image search"""
+
+    query_id: str
+    matches: List[MatchResult]
+    algorithm_used: str
+    processing_time_ms: float
diff --git a/wavefront/server/modules/image_search_module/image_search_module/repositories/ikb_repository.py b/wavefront/server/modules/image_search_module/image_search_module/repositories/ikb_repository.py
new file mode 100644
index 00000000..e619a39d
--- /dev/null
+++ b/wavefront/server/modules/image_search_module/image_search_module/repositories/ikb_repository.py
@@ -0,0 +1,78 @@
+from typing import List, Optional
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from db_repo_module.models.ikb_models import ImageKnowledgeBase
+from image_search_module.models.ikb_models import IKBInfo, IKBType, IKBStatus
+from image_search_module.algorithms.base import AlgorithmType
+
+
+class IKBRepository:
+    """Repository for Image Knowledge Base operations"""
+
+    def __init__(self, db_repository: SQLAlchemyRepository[ImageKnowledgeBase]):
+        self.db_repository = db_repository
+
+    async def create_ikb(self, ikb_info: IKBInfo) -> ImageKnowledgeBase:
+        """Create a new IKB in the database"""
+        return await self.db_repository.create(
+            ikb_id=ikb_info.ikb_id,
+            name=ikb_info.name,
+            description=ikb_info.description,
+            ikb_type=ikb_info.ikb_type.value,
+            algorithm_type=ikb_info.algorithm_type.value,
+            status=ikb_info.status.value,
+            config=ikb_info.config,
+            image_count=ikb_info.image_count,
+        )
+
+    async def get_ikb(self, ikb_id: str) -> Optional[ImageKnowledgeBase]:
+        """Get IKB by ID"""
+        return await self.db_repository.find_one(ikb_id=ikb_id)
+
+    async def list_ikbs(
+        self, ikb_type: Optional[IKBType] = None
+    ) -> List[ImageKnowledgeBase]:
+        """List all IKBs, optionally filtered by type"""
+        filters = {}
+        if ikb_type:
+            filters['ikb_type'] = ikb_type.value
+
+        return await self.db_repository.find(**filters)
+
+    async def update_ikb(self, ikb_id: str, **updates) -> Optional[ImageKnowledgeBase]:
+        """Update IKB via find_one_and_update"""
+        filters = {'ikb_id': ikb_id}
+        return await self.db_repository.find_one_and_update(filters, **updates)
+
+    async def delete_ikb(self, ikb_id: str) -> bool:
+        """Delete IKB; returns False when no matching IKB exists"""
+        existing = await self.db_repository.find_one(ikb_id=ikb_id)
+        if not existing:
+            return False
+        await self.db_repository.delete_all(ikb_id=ikb_id)
+        return True
+
+    async def increment_image_count(self, ikb_id: str) -> bool:
+        """Increment the image count for an IKB"""
+        # Get current IKB
+        ikb = await self.get_ikb(ikb_id)
+        if ikb:
+            # Update with incremented count
+            await self.update_ikb(ikb_id, image_count=ikb.image_count + 1)
+            return True
+        return False
+
+    def _convert_to_ikb_info(self, ikb_db: ImageKnowledgeBase) -> IKBInfo:
+        """Convert database model to IKBInfo"""
+        return IKBInfo(
+            ikb_id=ikb_db.ikb_id,
+            name=ikb_db.name,
+            description=ikb_db.description,
+            ikb_type=IKBType(ikb_db.ikb_type),
+            algorithm_type=AlgorithmType(ikb_db.algorithm_type),
+            status=IKBStatus(ikb_db.status),
+            image_count=ikb_db.image_count,
+            created_at=ikb_db.created_at,
+            updated_at=ikb_db.updated_at,
+            config=ikb_db.config,
+        )
diff --git a/wavefront/server/modules/image_search_module/image_search_module/repositories/sift_features_repository.py b/wavefront/server/modules/image_search_module/image_search_module/repositories/sift_features_repository.py
new file mode 100644
index 00000000..487c1e2f
--- /dev/null
+++ b/wavefront/server/modules/image_search_module/image_search_module/repositories/sift_features_repository.py
@@ -0,0 +1,52 @@
+from typing import List
+from db_repo_module.models.image_search_models import ReferenceImageFeatures
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from db_repo_module.models.image_search_models import SIFTFeatures
+from sqlalchemy import select
+
+
+class SIFTFeaturesRepository(SQLAlchemyRepository[SIFTFeatures]):
+    """Repository for SIFT features"""
+
+    async def create_sift_features(
+        self,
+        reference_image_id: str,
+        keypoints: List[dict],
+        descriptors: List[List[float]],
+    ) -> List[SIFTFeatures]:
+        """Create SIFT features for a reference image"""
+        sift_features = []
+
+        for i, (keypoint, descriptor) in enumerate(zip(keypoints, descriptors)):
+            # Create each feature via the parent's create method with keyword arguments
+            feature = await self.create(
+                reference_image_id=reference_image_id,
+                keypoint_id=i,  # Ensure sequential ordering
+                x=keypoint['pt'][0],
+                y=keypoint['pt'][1],
+                size=keypoint['size'],
+                angle=keypoint['angle'],
+                response=keypoint['response'],
+                octave=keypoint['octave'],
+                class_id=keypoint['class_id'],
+                descriptor=descriptor,
+            )
+            sift_features.append(feature)
+
+        return sift_features
+
+    async def get_features_by_ikb(self, ikb_id: str) -> List[SIFTFeatures]:
+        """Get SIFT features only from a specific IKB"""
+        async with self.session() as session:
+            stmt = (
+                select(SIFTFeatures)
+                .join(
+                    ReferenceImageFeatures,
+                    SIFTFeatures.reference_image_id
+                    == ReferenceImageFeatures.reference_image_id,
+                )
+                .where(ReferenceImageFeatures.ikb_id == ikb_id)
+            )
+
+            result = await session.execute(stmt)
+            return result.scalars().all()
diff --git a/wavefront/server/modules/image_search_module/image_search_module/services/algorithm_factory.py b/wavefront/server/modules/image_search_module/image_search_module/services/algorithm_factory.py
new file mode 100644
index 00000000..b61bc47c
--- /dev/null
+++ b/wavefront/server/modules/image_search_module/image_search_module/services/algorithm_factory.py
@@ -0,0 +1,36 @@
+from typing import Dict, Any
+from image_search_module.algorithms.base import ImageMatchingAlgorithm, AlgorithmType
+from image_search_module.algorithms.sift_matcher import SIFTMatcher
+
+
+class AlgorithmFactory:
+    """Factory for creating algorithm instances"""
+
+    def __init__(self):
+        self._algorithms = {
+            AlgorithmType.SIFT: SIFTMatcher,
+            # Register additional algorithms here as they are implemented
+        }
+
+    def create_algorithm(
+        self, algorithm_type: AlgorithmType, config: Dict[str, Any]
+    ) -> ImageMatchingAlgorithm:
+        """
+        Create an algorithm instance
+
+        Args:
+            algorithm_type: Type of algorithm to create
+            config: Configuration for the algorithm
+
+        Returns:
+            Algorithm instance
+        """
+        if algorithm_type not in self._algorithms:
+            raise ValueError(f'Unsupported algorithm type: {algorithm_type}')
+
+        algorithm_class = self._algorithms[algorithm_type]
+        return algorithm_class(config)
+
+    def get_supported_algorithms(self) -> list:
+        """Get list of supported algorithm types"""
+        return list(self._algorithms.keys())
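A short sketch of creating a matcher through the factory above and extracting features. The config keys mirror SIFTMatcher's own defaults, and the image path is a placeholder:

    from image_search_module.algorithms.base import AlgorithmType
    from image_search_module.services.algorithm_factory import AlgorithmFactory

    factory = AlgorithmFactory()
    sift = factory.create_algorithm(
        AlgorithmType.SIFT,
        {'max_features': 5000, 'lowe_ratio': 0.75, 'match_threshold': 10},
    )

    with open('query.jpg', 'rb') as f:  # placeholder path
        features = sift.extract_features(f.read())
    print(f'{len(features.keypoints)} keypoints extracted')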
diff --git a/wavefront/server/modules/image_search_module/image_search_module/services/algorithm_service.py b/wavefront/server/modules/image_search_module/image_search_module/services/algorithm_service.py
new file mode 100644
index 00000000..e703f55b
--- /dev/null
+++ b/wavefront/server/modules/image_search_module/image_search_module/services/algorithm_service.py
@@ -0,0 +1,37 @@
+from typing import Dict, Any
+from image_search_module.services.algorithm_factory import AlgorithmFactory
+from image_search_module.algorithms.base import AlgorithmType
+
+
+class AlgorithmService:
+    """Service for algorithm-specific operations"""
+
+    def __init__(self, algorithm_factory: AlgorithmFactory):
+        self.algorithm_factory = algorithm_factory
+
+    def extract_features(
+        self, image_bytes: bytes, algorithm_type: str
+    ) -> Dict[str, Any]:
+        """Extract features using the specified algorithm"""
+        # Convert string to enum
+        algo_enum = AlgorithmType(algorithm_type.lower())
+
+        # Create algorithm instance
+        algorithm = self.algorithm_factory.create_algorithm(algo_enum, {})
+
+        # Extract features
+        features = algorithm.extract_features(image_bytes)
+
+        # Convert to serializable format
+        if hasattr(features, 'to_dict'):
+            features_dict = features.to_dict()
+        else:
+            features_dict = {'features': features}
+
+        return {
+            'features': features_dict,
+            'algorithm_type': algorithm_type,
+            'feature_count': len(features.keypoints)
+            if hasattr(features, 'keypoints')
+            else 0,
+        }
diff --git a/wavefront/server/modules/image_search_module/image_search_module/services/ikb_service.py b/wavefront/server/modules/image_search_module/image_search_module/services/ikb_service.py
new file mode 100644
index 00000000..d2af6160
--- /dev/null
+++ b/wavefront/server/modules/image_search_module/image_search_module/services/ikb_service.py
@@ -0,0 +1,192 @@
+from typing import List, Dict, Any, Optional
+import uuid
+import base64
+import re
+from datetime import datetime
+from common_module.log.logger import logger
+
+from image_search_module.models.ikb_models import (
+    CreateIKBRequest,
+    IKBInfo,
+    IKBStatus,
+    IKBType,
+    IKBImageAddRequest,
+    IKBSearchRequest,
+    IKBSearchResponse,
+)
+from image_search_module.services.image_matching_service import ImageMatchingService
+from image_search_module.services.reference_image_service import ReferenceImageService
+from image_search_module.repositories.ikb_repository import IKBRepository
+
+
+class IKBService:
+    """Service for managing Image Knowledge Bases"""
+
+    def __init__(
+        self,
+        image_matching_service: ImageMatchingService,
+        reference_image_service: ReferenceImageService,
+        ikb_repository: IKBRepository,
+    ):
+        self.image_matching_service = image_matching_service
+        self.reference_image_service = reference_image_service
+        self.ikb_repository = ikb_repository
+
+    async def create_ikb(self, payload: CreateIKBRequest) -> IKBInfo:
+        """Create a new Image Knowledge Base"""
+        ikb_id = str(uuid.uuid4())
+
+        ikb_info = IKBInfo(
+            ikb_id=ikb_id,
+            name=payload.name,
+            description=payload.description,
+            ikb_type=payload.ikb_type,
+            algorithm_type=payload.algorithm_type,
+            status=IKBStatus.ACTIVE,
+            image_count=0,
+            created_at=datetime.utcnow(),
+            updated_at=datetime.utcnow(),
+            config=payload.config or {},
+        )
+
+        await self.ikb_repository.create_ikb(ikb_info)
+
+        logger.info(f'Created new IKB: {ikb_info.name} (ID: {ikb_id})')
+        return ikb_info
+
+    async def get_ikb(self, ikb_id: str) -> Optional[IKBInfo]:
+        """Get information about a specific IKB"""
+        ikb_db = await self.ikb_repository.get_ikb(ikb_id)
+        if not ikb_db:
+            return None
+
+        return self.ikb_repository._convert_to_ikb_info(ikb_db)
+
+    async def list_ikbs(self, ikb_type: Optional[IKBType] = None) -> List[IKBInfo]:
+        """List all IKBs, optionally filtered by type"""
+        ikb_dbs = await self.ikb_repository.list_ikbs(ikb_type=ikb_type)
+
+        return [self.ikb_repository._convert_to_ikb_info(ikb_db) for ikb_db in ikb_dbs]
+
+    async def update_ikb(self, ikb_id: str, **updates) -> Optional[IKBInfo]:
+        """Update an IKB"""
+        ikb_db = await self.ikb_repository.update_ikb(ikb_id, **updates)
+        if not ikb_db:
+            return None
+
+        logger.info(f'Updated IKB: {ikb_db.name} (ID: {ikb_id})')
+        return self.ikb_repository._convert_to_ikb_info(ikb_db)
+
+    async def delete_ikb(self, ikb_id: str) -> bool:
+        """Delete an IKB"""
+        success = await self.ikb_repository.delete_ikb(ikb_id)
+        if success:
+            logger.info(f'Deleted IKB (ID: {ikb_id})')
+        return success
+
+    async def add_image_to_ikb(
+        self, ikb_id: str, payload: IKBImageAddRequest
+    ) -> Dict[str, Any]:
+        """Add an image to a specific IKB"""
+        ikb = await self.get_ikb(ikb_id)
+        if not ikb:
+            raise ValueError(f'IKB with ID {ikb_id} not found')
+
+        if ikb.status != IKBStatus.ACTIVE:
+            raise ValueError(f'IKB {ikb.name} is not active (status: {ikb.status})')
+
+        # Decode base64 image
+        data_url_pattern = r'^data:(image/\w+);base64,(.+)'
+        match = re.match(data_url_pattern, payload.image_data)
+        if not match:
+            raise ValueError('Invalid image data format')
+
+        image_bytes = base64.b64decode(match.group(2))
+
+        # Generate reference ID if not provided
+        reference_id = payload.reference_id or str(uuid.uuid4())
+
+        # Add reference image with IKB ID
+        result = await self.reference_image_service.add_reference_image(
+            image_bytes=image_bytes,
+            reference_image_id=reference_id,
+            algorithm_type=ikb.algorithm_type.value,
+            ikb_id=ikb_id,
+            metadata={
+                **(payload.metadata or {}),
+                'ikb_id': ikb_id,
+                'ikb_name': ikb.name,
+                'ikb_type': ikb.ikb_type.value,
+            },
+        )
+
+        # Update IKB image count in database
+        await self.ikb_repository.increment_image_count(ikb_id)
+
+        logger.info(f'Added image to IKB {ikb.name}: {reference_id}')
+
+        return {
+            'reference_id': reference_id,
+            'ikb_id': ikb_id,
+            'ikb_name': ikb.name,
+            'algorithm_type': ikb.algorithm_type.value,
+            'extraction_results': result,
+        }
+
+    async def search_in_ikb(
+        self, ikb_id: str, payload: IKBSearchRequest
+    ) -> IKBSearchResponse:
+        """Search for similar images within a specific IKB"""
+        ikb = await self.get_ikb(ikb_id)
+        if not ikb:
+            raise ValueError(f'IKB with ID {ikb_id} not found')
+
+        if ikb.status != IKBStatus.ACTIVE:
+            raise ValueError(f'IKB {ikb.name} is not active (status: {ikb.status})')
+
+        # Decode base64 image
+        data_url_pattern = r'^data:(image/\w+);base64,(.+)'
+        match = re.match(data_url_pattern, payload.image_data)
+        if not match:
+            raise ValueError('Invalid image data format')
+
+        image_bytes = base64.b64decode(match.group(2))
+
+        # Generate query ID
+        query_id = str(uuid.uuid4())
+
+        # Perform matching using the IKB's algorithm and the request's threshold
+        matching_result = await self.image_matching_service.match_image(
+            image_bytes=image_bytes,
+            ikb_id=ikb_id,
+            threshold=payload.threshold,
+            max_results=payload.max_results,
+            algorithm_type=ikb.algorithm_type,
+        )
+
+        # Keep only matches whose stored reference belongs to this IKB
+        ikb_matches = []
+        for match in matching_result:
+            # Get the reference image from database to check IKB ID
+            reference_image = (
+                await self.reference_image_service.features_repository.find_one(
+                    reference_image_id=match.reference_id
+                )
+            )
+
+            if reference_image and reference_image.ikb_id == ikb_id:
+                ikb_matches.append(match.to_dict())
+
+        response = IKBSearchResponse(
+            query_id=query_id,
+            ikb_id=ikb_id,
+            ikb_name=ikb.name,
+            algorithm_used=ikb.algorithm_type.value,
+            matches=ikb_matches,
+            total_images_searched=ikb.image_count,
+            processing_time_ms=sum(m.processing_time_ms for m in matching_result),
+        )
+
+        logger.info(f'Searched IKB {ikb.name}: found {len(ikb_matches)} matches')
+
+        return response
diff --git a/wavefront/server/modules/image_search_module/image_search_module/services/image_matching_service.py b/wavefront/server/modules/image_search_module/image_search_module/services/image_matching_service.py
new file mode 100644
index 00000000..1d669908
--- /dev/null
+++ b/wavefront/server/modules/image_search_module/image_search_module/services/image_matching_service.py
@@ -0,0 +1,109 @@
+from typing import List, Dict, Any, Optional
+from common_module.log.logger import logger
+
+from image_search_module.algorithms.base import (
+    ImageMatchingAlgorithm,
+    MatchResult,
+    AlgorithmType,
+)
+from image_search_module.services.algorithm_factory import AlgorithmFactory
+from image_search_module.services.reference_image_service import ReferenceImageService
+
+
+class ImageMatchingService:
+    """Main service for image matching operations"""
+
+    def __init__(
+        self,
+        algorithm_factory: AlgorithmFactory,
+        reference_service: ReferenceImageService,
+        active_algorithm_type: AlgorithmType,
+        algorithm_config: Dict[str, Any],
+        max_results: int = 10,
+    ):
+        self.algorithm_factory = algorithm_factory
+        self.reference_service = reference_service
+        self.active_algorithm_type = active_algorithm_type
+        self.algorithm_config = algorithm_config
+        self.max_results = max_results
+
+        # Initialize active algorithm
+        self.active_algorithm = self._create_active_algorithm()
+
+    def _create_active_algorithm(self) -> ImageMatchingAlgorithm:
+        """Create the currently active algorithm instance"""
+        algo_config = self.algorithm_config.get(self.active_algorithm_type.value, {})
+        return self.algorithm_factory.create_algorithm(
+            self.active_algorithm_type, algo_config
+        )
+
+    async def match_image(
+        self,
+        image_bytes: bytes,
+        ikb_id: str,
+        threshold: Optional[float] = None,
+        max_results: Optional[int] = None,
+        algorithm_type: Optional[AlgorithmType] = None,
+    ) -> List[MatchResult]:
+        """
+        Main image matching method
+        """
+
+        # Use provided values or defaults
+        max_results = max_results or self.max_results
+        algorithm = (
+            self.active_algorithm
+            if algorithm_type is None
+            else self.algorithm_factory.create_algorithm(
+                algorithm_type, self.algorithm_config.get(algorithm_type.value, {})
+            )
+        )
+
+        logger.info(f'Starting image matching with {algorithm.__class__.__name__}')
+
+        # Extract features from query image
+        query_features = algorithm.extract_features(image_bytes)
+        logger.info('Query features extracted successfully')
+
+        # Get reference features for this algorithm
+        algorithm_type_str = (
+            algorithm_type.value if algorithm_type else self.active_algorithm_type.value
+        )
+        reference_features = await self.reference_service.get_reference_features(
+            algorithm_type=algorithm_type_str, ikb_id=ikb_id
+        )
+        logger.info(f'Retrieved {len(reference_features)} reference features')
+
+        # Perform batch matching
+        all_matches = algorithm.batch_match(query_features, reference_features)
+        logger.info(f'Completed matching, found {len(all_matches)} comparisons')
+
+        # Keep matches the algorithm accepted that also clear the optional score threshold
+        valid_matches = [
+            match
+            for match in all_matches
+            if match.is_match
+            and (threshold is None or match.match_score >= threshold)
+        ]
+
+        sorted_matches = sorted(
+            valid_matches, key=lambda x: x.match_score, reverse=True
+        )[:max_results]
+
+        logger.info(
+            f'Returning {len(sorted_matches)} matches above threshold {threshold}'
+        )
+
+        return sorted_matches
+
+    def get_algorithm_info(
+        self, algorithm_type: Optional[AlgorithmType] = None
+    ) -> Dict[str, Any]:
+        """Get information about an algorithm"""
+
+        algo_type = algorithm_type or self.active_algorithm_type
+        algorithm = self.algorithm_factory.create_algorithm(
+            algo_type, self.algorithm_config.get(algo_type.value, {})
+        )
+
+        return algorithm.get_algorithm_info().to_dict()
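An end-to-end sketch of the matching call above, assuming the service has been resolved from ImageSearchContainer; the IKB id, threshold, and query bytes are placeholders:

    # Matches failing is_match or scoring below the optional threshold are
    # dropped; results are sorted by match_score and truncated to max_results.
    matches = await image_matching_service.match_image(
        image_bytes=query_bytes,  # raw image bytes, e.g. a decoded data URL
        ikb_id='ikb-0001',        # placeholder IKB id
        threshold=15.0,
        max_results=5,
    )
    for m in matches:
        print(m.reference_id, m.match_score, m.confidence)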
+
+    async def _store_sift_features(self, reference_image_id: str, features_data: dict):
+        """Store SIFT features in the dedicated SIFTFeatures table"""
+        keypoints = features_data.get('features', {}).get('keypoints', [])
+        descriptors = features_data.get('features', {}).get('descriptors', [])
+
+        await self.sift_features_repository.create_sift_features(
+            reference_image_id=reference_image_id,
+            keypoints=keypoints,
+            descriptors=descriptors,
+        )
+
+        logger.info(f'Stored {len(keypoints)} SIFT keypoints for {reference_image_id}')
+
+    async def get_reference_features(
+        self, algorithm_type: str, ikb_id: str
+    ) -> Dict[str, SIFTFeatures]:
+        """
+        Get all reference features of an IKB for a specific algorithm type
+
+        Args:
+            algorithm_type: Type of algorithm
+            ikb_id: ID of the image knowledge base whose features to load
+
+        Returns:
+            Dictionary mapping reference_image_id to SIFTFeatures objects
+        """
+
+        if algorithm_type.lower() == 'sift':
+            # Get SIFT features from dedicated table
+            sift_features = await self.sift_features_repository.get_features_by_ikb(
+                ikb_id
+            )
+
+            # Group features by reference_image_id and sort by keypoint_id
+            grouped_features = {}
+            for feature in sift_features:
+                ref_id = feature.reference_image_id
+
+                if ref_id not in grouped_features:
+                    grouped_features[ref_id] = {
+                        'keypoints': [],
+                        'descriptors': [],
+                        # (keypoint_id, keypoint, descriptor) triples for sorting
+                        'keypoint_data': [],
+                    }
+
+                # Store keypoint data with ID for proper ordering
+                grouped_features[ref_id]['keypoint_data'].append(
+                    (
+                        feature.keypoint_id,
+                        {
+                            'pt': [feature.x, feature.y],
+                            'size': feature.size,
+                            'angle': feature.angle,
+                            'response': feature.response,
+                            'octave': feature.octave,
+                            'class_id': feature.class_id,
+                        },
+                        feature.descriptor,
+                    )
+                )
+
+            # Convert to SIFTFeatures objects
+            sift_features_dict = {}
+            for ref_id, data in grouped_features.items():
+                # Sort by keypoint_id to maintain order
+                sorted_data = sorted(data['keypoint_data'], key=lambda x: x[0])
+
+                # Extract keypoints and descriptors in correct order
+                keypoints = []
+                descriptors = []
+
+                for keypoint_id, keypoint_data, descriptor in sorted_data:
+                    # Create OpenCV KeyPoint object
+                    kp = cv2.KeyPoint(
+                        x=keypoint_data['pt'][0],
+                        y=keypoint_data['pt'][1],
+                        size=keypoint_data['size'],
+                        angle=keypoint_data['angle'],
+                        response=keypoint_data['response'],
+                        octave=keypoint_data['octave'],
+                        class_id=keypoint_data['class_id'],
+                    )
+                    keypoints.append(kp)
+                    descriptors.append(descriptor)
+
+                # Convert descriptors to a numpy array
+                descriptors_array = np.array(descriptors, dtype=np.float32)
+
+                # Create SIFTFeatures object with correct types
+                sift_features_dict[ref_id] = SIFTFeatures(
+                    keypoints=keypoints,
+                    descriptors=descriptors_array,
+                    image_shape=(800, 600),  # Default shape; original size is not persisted
+                )
+
+            logger.info(
+                f'Retrieved {len(sift_features_dict)} reference features for {algorithm_type}'
+            )
+            return sift_features_dict
+
+        else:
+            # Handle other algorithm types
+            logger.warning(f'Algorithm type {algorithm_type} not implemented yet')
+            return {}
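+
+    # Round-trip sketch for one stored row (illustrative; column names are the
+    # ones used above, and SIFT's standard 128-dimensional float descriptors
+    # are assumed):
+    #
+    #     kp = cv2.KeyPoint(x=12.5, y=40.0, size=3.2, angle=271.0,
+    #                       response=0.02, octave=1, class_id=-1)
+    #     desc = np.zeros(128, dtype=np.float32)  # one descriptor per keypoint
+    #     # persisted via create_sift_features(...), rebuilt by the loop above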
+
+    async def delete_reference_image(
+        self, reference_image_id: str, algorithm_types: Optional[List[str]] = None
+    ) -> Dict[str, Any]:
+        """
+        Delete a reference image and its features
+
+        Args:
+            reference_image_id: ID of the reference image
+            algorithm_types: Optional list of algorithm types to delete features for
+
+        Returns:
+            Dictionary with deletion results
+        """
+        # Delete from cloud storage
+        await self.cloud_storage_manager.delete_file(
+            bucket_name=self.bucket_name, file_path=reference_image_id
+        )
+
+        # Delete features from database
+        deleted_features = await self.features_repository.delete_by_reference_id(
+            reference_image_id,
+            algorithm_types[0]
+            if algorithm_types and len(algorithm_types) == 1
+            else None,
+        )
+
+        result = {
+            'reference_image_id': reference_image_id,
+            'deleted_features_count': deleted_features,
+            'deleted_from_storage': True,
+        }
+
+        logger.info(f'Successfully deleted reference image {reference_image_id}')
+        return result
+
+    async def ensure_features_available(
+        self, algorithm_type: str, ikb_id: str
+    ) -> bool:
+        features = await self.get_reference_features(algorithm_type, ikb_id)
+        is_available = len(features) > 0
+
+        if not is_available:
+            logger.warning(f'No reference features available for {algorithm_type}')
+        else:
+            logger.info(
+                f'Features available for {algorithm_type}: {len(features)} references'
+            )
+
+        return is_available
diff --git a/wavefront/server/modules/image_search_module/pyproject.toml b/wavefront/server/modules/image_search_module/pyproject.toml
new file mode 100644
index 00000000..8dc3103d
--- /dev/null
+++ b/wavefront/server/modules/image_search_module/pyproject.toml
@@ -0,0 +1,46 @@
+[project]
+name = "image-search-module"
+version = "0.1.0"
+description = "Generic image search and matching module"
+authors = [
+    { name = "rootflo engineering", email = "engineering@rootflo.ai" }
+]
+requires-python = ">=3.11"
+
+dependencies = [
+    "common-module",
+    "db-repo-module",
+    "flo-cloud",
+    "opencv-python>=4.8.0",
+    "numpy>=1.24.0",
+    "pillow>=10.0.0",
+    "scikit-learn>=1.3.0",
+]
+
+[tool.uv.sources]
+common-module = { workspace = true }
+db-repo-module = { workspace = true }
+flo-cloud = { workspace = true }
+
+[dependency-groups]
+dev = [
+    "pytest>=8.3.3,<9.0.0",
+    "pytest-asyncio>=0.24.0,<1.0.0",
+    "pytest-mock>=3.12.0",
+]
+
+
+[tool.pytest.ini_options]
+asyncio_mode = "auto"
+
+[tool.uv]
+package = true
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["image_search_module"]
diff --git a/wavefront/server/modules/image_search_module/tests/conftest.py b/wavefront/server/modules/image_search_module/tests/conftest.py
new file mode 100644
index 00000000..428fba00
--- /dev/null
+++ b/wavefront/server/modules/image_search_module/tests/conftest.py
@@ -0,0 +1,35 @@
+import pytest
+import os
+from pathlib import Path
+
+
+from db_repo_module.database.connection import DatabaseClient, DatabaseConfig
+
+
+@pytest.fixture(scope='session')
+async def db_client():
+    """Create database client for testing"""
+    db_config = DatabaseConfig(
+        username=os.getenv('DB_USERNAME', 'test_user'),
+        password=os.getenv('DB_PASSWORD', 'test_password'),
+        host=os.getenv('DB_HOST', 'localhost'),
+        port=os.getenv('DB_PORT', '5432'),
+        db_name=os.getenv('DB_NAME', 'test_db'),
+    )
+
+    db_client = DatabaseClient(db_config)
+    await db_client.connect()
+    yield db_client
+    await db_client.close()
+
+
+@pytest.fixture
+def test_image_base64():
+    """Provide a test image as base64 data URL"""
+    return 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg=='
+
+
+@pytest.fixture
+def test_images_dir():
+    """Provide path to test images directory"""
+    return Path(__file__).parent / 'test_images'
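+
+
+@pytest.fixture
+def generated_image_bytes():
+    """Illustrative sketch, not yet used by the suite: build a small PNG in
+    memory with Pillow (a declared dependency) so feature-extraction tests
+    need no files on disk. The size and colour are arbitrary choices."""
+    import io
+
+    from PIL import Image
+
+    image = Image.new('RGB', (64, 64), color=(255, 215, 0))
+    buffer = io.BytesIO()
+    image.save(buffer, format='PNG')
+    return buffer.getvalue()
diff --git a/wavefront/server/modules/image_search_module/tests/db_setup.py 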
b/wavefront/server/modules/image_search_module/tests/db_setup.py new file mode 100644 index 00000000..79431d41 --- /dev/null +++ b/wavefront/server/modules/image_search_module/tests/db_setup.py @@ -0,0 +1,123 @@ +""" +Database setup utilities for IKB functionality. +Can be imported and used in tests or other scripts. +""" + +from typing import Optional +from db_repo_module.database.connection import DatabaseClient, DatabaseConfig +from db_repo_module.database.base import Base +from sqlalchemy import text + + +async def ensure_tables_exist(db_client: DatabaseClient) -> None: + """ + Ensure all IKB-related tables exist in the database with the correct schema. + This will drop and recreate tables to ensure they have the latest schema. + + Args: + db_client: DatabaseClient instance + """ + async with db_client._engine.begin() as connection: + # Drop existing tables in reverse order (due to foreign key constraints) + tables_to_drop = [ + 'sift_features', + 'reference_image_features', + 'image_knowledge_bases', + ] + + for table in tables_to_drop: + await connection.execute(text(f'DROP TABLE IF EXISTS {table} CASCADE;')) + + # Create all tables with the latest schema + await connection.run_sync(Base.metadata.create_all) + + +async def setup_test_database( + db_config: Optional[DatabaseConfig] = None, +) -> DatabaseClient: + """ + Setup a test database with all required tables. + + Args: + db_config: Optional database config. If None, uses environment variables. + + Returns: + DatabaseClient instance ready for use + """ + if db_config is None: + import os + + db_config = DatabaseConfig( + username=os.getenv('DB_USERNAME'), + password=os.getenv('DB_PASSWORD'), + host=os.getenv('DB_HOST'), + port=os.getenv('DB_PORT'), + db_name=os.getenv('DB_NAME'), + ) + + db_client = DatabaseClient(db_config) + await db_client.connect() + await ensure_tables_exist(db_client) + + return db_client + + +async def cleanup_test_database(db_client: DatabaseClient) -> None: + """ + Clean up test database by dropping IKB tables. + + Args: + db_client: DatabaseClient instance + """ + async with db_client._engine.begin() as connection: + # Drop tables in reverse order (due to foreign key constraints) + tables_to_drop = [ + 'sift_features', + 'reference_image_features', + 'image_knowledge_bases', + ] + + for table in tables_to_drop: + await connection.execute(text(f'DROP TABLE IF EXISTS {table} CASCADE;')) + + await db_client.close() + + +async def verify_tables_exist(db_client: DatabaseClient) -> None: + """ + Verify that all required tables exist and have the correct columns. 
+ + Args: + db_client: DatabaseClient instance + """ + async with db_client._engine.begin() as connection: + # Check if tables exist + result = await connection.execute( + text(""" + SELECT table_name + FROM information_schema.tables + WHERE table_schema = 'public' + AND table_name IN ('image_knowledge_bases', 'reference_image_features', 'sift_features') + ORDER BY table_name; + """) + ) + + tables = [row[0] for row in result.fetchall()] + print(f'Found tables: {tables}') + + # Check if reference_image_features has ikb_id column + if 'reference_image_features' in tables: + result = await connection.execute( + text(""" + SELECT column_name + FROM information_schema.columns + WHERE table_name = 'reference_image_features' + AND column_name = 'ikb_id'; + """) + ) + + ikb_id_column = result.fetchone() + if ikb_id_column: + print('โœ… reference_image_features table has ikb_id column') + else: + print('โŒ reference_image_features table missing ikb_id column') diff --git a/wavefront/server/modules/image_search_module/tests/test_crud_endpoints.py b/wavefront/server/modules/image_search_module/tests/test_crud_endpoints.py new file mode 100644 index 00000000..e933cb74 --- /dev/null +++ b/wavefront/server/modules/image_search_module/tests/test_crud_endpoints.py @@ -0,0 +1,627 @@ +""" +Comprehensive CRUD endpoint tests for Image Search Module +Tests all endpoints: Create, Read, Update, Delete operations +""" + +import pytest +import base64 +from unittest.mock import Mock, AsyncMock, MagicMock +from fastapi import FastAPI +from fastapi.testclient import TestClient +from datetime import datetime +from uuid import uuid4 + +from image_search_module.controllers.image_search_controller import image_search_router +from image_search_module.image_search_container import ImageSearchContainer +from image_search_module.algorithms.base import AlgorithmType +from image_search_module.models.ikb_models import ( + IKBType, + IKBStatus, +) +from common_module.common_container import CommonContainer +from db_repo_module.db_repo_container import DatabaseModuleContainer + + +class MockDbClient: + def __init__(self): + # Create a mock session factory + self.session = MagicMock() + # Mock the async context manager behavior + mock_session = MagicMock() + mock_session.add = Mock() + mock_session.commit = AsyncMock() + mock_session.refresh = AsyncMock() + mock_session.query = Mock() + mock_session.get = AsyncMock() + + self.session.return_value.__aenter__ = AsyncMock(return_value=mock_session) + self.session.return_value.__aexit__ = AsyncMock(return_value=None) + + +# Create a custom mock IKBInfo that serializes properly +class MockIKBInfo: + """Mock IKBInfo that serializes enums properly""" + + def __init__(self, **kwargs): + self.ikb_id = kwargs.get('ikb_id', str(uuid4())) + self.name = kwargs.get('name', 'Test IKB') + self.description = kwargs.get('description', 'Test IKB for unit testing') + self.ikb_type = kwargs.get('ikb_type', IKBType.GOLD_MATCHING) + self.algorithm_type = kwargs.get('algorithm_type', AlgorithmType.SIFT) + self.status = kwargs.get('status', IKBStatus.ACTIVE) + self.image_count = kwargs.get('image_count', 0) + self.created_at = kwargs.get('created_at', datetime.now()) + self.updated_at = kwargs.get('updated_at', datetime.now()) + self.config = kwargs.get('config', {'threshold': 0.8}) + + def dict(self): + """Return dictionary with enum values serialized as strings""" + return { + 'ikb_id': self.ikb_id, + 'name': self.name, + 'description': self.description, + 'ikb_type': self.ikb_type.value + if 
hasattr(self.ikb_type, 'value') + else str(self.ikb_type), + 'algorithm_type': self.algorithm_type.value + if hasattr(self.algorithm_type, 'value') + else str(self.algorithm_type), + 'status': self.status.value + if hasattr(self.status, 'value') + else str(self.status), + 'image_count': self.image_count, + 'created_at': self.created_at.isoformat() + if isinstance(self.created_at, datetime) + else self.created_at, + 'updated_at': self.updated_at.isoformat() + if isinstance(self.updated_at, datetime) + else self.updated_at, + 'config': self.config, + } + + def model_dump(self, mode='json'): + """Pydantic v2 compatibility - calls dict() method""" + return self.dict() + + +# Create a custom mock IKBSearchResponse that serializes properly +class MockIKBSearchResponse: + """Mock IKBSearchResponse that serializes properly""" + + def __init__(self, **kwargs): + self.query_id = kwargs.get('query_id', str(uuid4())) + self.ikb_id = kwargs.get('ikb_id', str(uuid4())) + self.ikb_name = kwargs.get('ikb_name', 'Test IKB') + self.algorithm_used = kwargs.get('algorithm_used', 'sift') + self.matches = kwargs.get('matches', []) + self.total_images_searched = kwargs.get('total_images_searched', 0) + self.processing_time_ms = kwargs.get('processing_time_ms', 0.0) + + def dict(self): + """Return dictionary representation""" + return { + 'query_id': self.query_id, + 'ikb_id': self.ikb_id, + 'ikb_name': self.ikb_name, + 'algorithm_used': self.algorithm_used, + 'matches': self.matches, + 'total_images_searched': self.total_images_searched, + 'processing_time_ms': self.processing_time_ms, + } + + +@pytest.fixture +def mock_containers(): + """Setup mock containers for testing""" + # Mock database container + db_repo_container = DatabaseModuleContainer() + mock_db_client = MockDbClient() + db_repo_container.db_client.override(mock_db_client) + + # Mock common container + common_container = CommonContainer() + mock_cache_manager = Mock() + mock_cache_manager.get_str.return_value = ( + '{"user_id": "test_user", "session_id": "test_session"}' + ) + mock_cache_manager.add = Mock() + common_container.cache_manager.override(mock_cache_manager) + + # Mock image search container + mock_cloud_storage_manager = Mock() + mock_cloud_storage_manager.save_file = AsyncMock( + return_value='mock://storage/test.jpg' + ) + mock_cloud_storage_manager.get_file = AsyncMock(return_value=b'mock_data') + + image_search_container = ImageSearchContainer( + db_client=mock_db_client, + cloud_storage_manager=mock_cloud_storage_manager, + ) + + # Override the problematic providers directly + image_search_container.active_algorithm_type.override(AlgorithmType.SIFT) + + # Mock the repositories with proper async methods and correct return types + mock_ikb_repository = Mock() + mock_reference_features_repository = Mock() + mock_sift_features_repository = Mock() + + # Override the repository providers + image_search_container.ikb_repository.override(mock_ikb_repository) + image_search_container.reference_features_repository.override( + mock_reference_features_repository + ) + image_search_container.sift_features_repository.override( + mock_sift_features_repository + ) + + # Mock the services that depend on config + mock_algorithm_factory = Mock() + mock_algorithm_service = Mock() + mock_reference_image_service = Mock() + mock_image_matching_service = Mock() + + image_search_container.algorithm_factory.override(mock_algorithm_factory) + image_search_container.algorithm_service.override(mock_algorithm_service) + 
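# Swapping the service providers for mocks keeps the endpoints importable
+    # without OpenCV, cloud storage, or a database behind them.
+    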
image_search_container.reference_image_service.override( + mock_reference_image_service + ) + image_search_container.image_matching_service.override(mock_image_matching_service) + + # Wire containers + common_container.wire(packages=['image_search_module.controllers']) + image_search_container.wire(packages=['image_search_module.controllers']) + + yield db_repo_container, common_container, image_search_container + + # Cleanup + common_container.unwire() + image_search_container.unwire() + + +@pytest.fixture +def test_app(mock_containers): + """Create test FastAPI app""" + app = FastAPI() + app.include_router(image_search_router, prefix='/floware') + return app + + +@pytest.fixture +def test_client(test_app): + """Create test client""" + return TestClient(test_app) + + +@pytest.fixture +def sample_image_data(): + """Create a sample base64 image data URL for testing""" + # Create a minimal 1x1 pixel PNG in base64 + # This is a valid but minimal PNG file + png_data = base64.b64decode( + 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg==' + ) + return f'data:image/png;base64,{base64.b64encode(png_data).decode()}' + + +@pytest.fixture +def sample_ikb_data(): + """Sample IKB data for testing""" + return { + 'name': 'Test IKB', + 'description': 'Test IKB for unit testing', + 'ikb_type': 'gold_matching', + 'algorithm_type': 'sift', + 'config': {'threshold': 0.8}, + } + + +@pytest.fixture +def mock_ikb_info(): + """Mock IKB info object that serializes properly""" + return MockIKBInfo( + ikb_id=str(uuid4()), + name='Test IKB', + description='Test IKB for unit testing', + ikb_type=IKBType.GOLD_MATCHING, + algorithm_type=AlgorithmType.SIFT, + status=IKBStatus.ACTIVE, + image_count=0, + created_at=datetime.now(), + updated_at=datetime.now(), + config={'threshold': 0.8}, + ) + + +class TestIKBCreateEndpoint: + """Test CREATE operations""" + + def test_create_ikb_success( + self, test_client, sample_ikb_data, mock_containers, mock_ikb_info + ): + """Test successful IKB creation""" + db_repo_container, common_container, image_search_container = mock_containers + + # Mock the IKB service to return our mock IKB info + mock_ikb_service = Mock() + mock_ikb_service.create_ikb = AsyncMock(return_value=mock_ikb_info) + image_search_container.ikb_service.override(mock_ikb_service) + + response = test_client.post('/floware/ikb/create', json=sample_ikb_data) + + assert response.status_code == 201 + response_data = response.json() + assert response_data['meta']['status'] == 'success' + assert response_data['data']['name'] == sample_ikb_data['name'] + assert response_data['data']['ikb_type'] == sample_ikb_data['ikb_type'] + assert ( + response_data['data']['algorithm_type'] == sample_ikb_data['algorithm_type'] + ) + + def test_create_ikb_invalid_data(self, test_client): + """Test IKB creation with invalid data""" + invalid_data = { + 'name': '', # Empty name should fail validation + 'ikb_type': 'invalid_type', + 'algorithm_type': 'invalid_algorithm', + } + + response = test_client.post('/floware/ikb/create', json=invalid_data) + assert response.status_code == 422 # Validation error + + def test_create_ikb_missing_required_fields(self, test_client): + """Test IKB creation with missing required fields""" + incomplete_data = { + 'name': 'Test IKB' + # Missing ikb_type and algorithm_type + } + + response = test_client.post('/floware/ikb/create', json=incomplete_data) + assert response.status_code == 422 # Validation error + + +class TestIKBReadEndpoints: + """Test READ 
operations""" + + def test_list_ikbs_success(self, test_client, mock_containers, mock_ikb_info): + """Test successful IKB listing""" + db_repo_container, common_container, image_search_container = mock_containers + + # Mock the IKB service to return a list of IKBs + mock_ikb_service = Mock() + mock_ikb_service.list_ikbs = AsyncMock(return_value=[mock_ikb_info]) + image_search_container.ikb_service.override(mock_ikb_service) + + response = test_client.get('/floware/ikb/') + + assert response.status_code == 200 + response_data = response.json() + assert response_data['meta']['status'] == 'success' + assert 'ikbs' in response_data['data'] + assert len(response_data['data']['ikbs']) == 1 + assert response_data['data']['ikbs'][0]['name'] == mock_ikb_info.name + + def test_list_ikbs_with_type_filter( + self, test_client, mock_containers, mock_ikb_info + ): + """Test IKB listing with type filter""" + db_repo_container, common_container, image_search_container = mock_containers + + # Mock the IKB service + mock_ikb_service = Mock() + mock_ikb_service.list_ikbs = AsyncMock(return_value=[mock_ikb_info]) + image_search_container.ikb_service.override(mock_ikb_service) + + response = test_client.get('/floware/ikb/?ikb_type=gold_matching') + + assert response.status_code == 200 + # Verify that the service was called with the correct filter + mock_ikb_service.list_ikbs.assert_called_once_with( + ikb_type=IKBType.GOLD_MATCHING + ) + + def test_list_ikbs_empty(self, test_client, mock_containers): + """Test IKB listing when no IKBs exist""" + db_repo_container, common_container, image_search_container = mock_containers + + # Mock the IKB service to return empty list + mock_ikb_service = Mock() + mock_ikb_service.list_ikbs = AsyncMock(return_value=[]) + image_search_container.ikb_service.override(mock_ikb_service) + + response = test_client.get('/floware/ikb/') + + assert response.status_code == 200 + response_data = response.json() + assert response_data['meta']['status'] == 'success' + assert response_data['data']['ikbs'] == [] + + def test_get_ikb_success(self, test_client, mock_containers, mock_ikb_info): + """Test successful IKB retrieval by ID""" + db_repo_container, common_container, image_search_container = mock_containers + + # Mock the IKB service + mock_ikb_service = Mock() + mock_ikb_service.get_ikb = AsyncMock(return_value=mock_ikb_info) + image_search_container.ikb_service.override(mock_ikb_service) + + response = test_client.get(f'/floware/ikb/{mock_ikb_info.ikb_id}') + + assert response.status_code == 200 + response_data = response.json() + assert response_data['meta']['status'] == 'success' + assert response_data['data']['ikb_id'] == mock_ikb_info.ikb_id + assert response_data['data']['name'] == mock_ikb_info.name + + def test_get_ikb_not_found(self, test_client, mock_containers): + """Test IKB retrieval when IKB doesn't exist""" + db_repo_container, common_container, image_search_container = mock_containers + + # Mock the IKB service to return None (not found) + mock_ikb_service = Mock() + mock_ikb_service.get_ikb = AsyncMock(return_value=None) + image_search_container.ikb_service.override(mock_ikb_service) + + fake_id = str(uuid4()) + response = test_client.get(f'/floware/ikb/{fake_id}') + + assert response.status_code == 404 + response_data = response.json() + assert response_data['meta']['status'] == 'failure' + assert f'IKB with ID {fake_id} not found' in response_data['meta']['error'] + + +class TestIKBUpdateOperations: + """Test UPDATE operations (adding images to IKB)""" + + def 
test_add_image_to_ikb_success( + self, test_client, mock_containers, sample_image_data, mock_ikb_info + ): + """Test successful image addition to IKB""" + db_repo_container, common_container, image_search_container = mock_containers + + # Mock the IKB service + mock_ikb_service = Mock() + mock_result = { + 'status': 'success', + 'reference_id': str(uuid4()), + 'message': 'Image added successfully', + } + mock_ikb_service.add_image_to_ikb = AsyncMock(return_value=mock_result) + image_search_container.ikb_service.override(mock_ikb_service) + + payload = { + 'image_data': sample_image_data, + 'reference_id': 'test_ref_123', + 'metadata': {'source': 'test'}, + } + + response = test_client.post( + f'/floware/ikb/{mock_ikb_info.ikb_id}/add', json=payload + ) + + assert response.status_code == 201 + response_data = response.json() + assert response_data['meta']['status'] == 'success' + assert response_data['data']['status'] == 'success' + + def test_add_image_to_ikb_invalid_image_data(self, test_client, mock_ikb_info): + """Test image addition with invalid image data""" + invalid_payload = { + 'image_data': 'invalid_base64_data', # Invalid format + 'reference_id': 'test_ref_123', + } + + response = test_client.post( + f'/floware/ikb/{mock_ikb_info.ikb_id}/add', json=invalid_payload + ) + assert response.status_code == 422 # Validation error + + def test_search_in_ikb_success( + self, test_client, mock_containers, sample_image_data, mock_ikb_info + ): + """Test successful image search in IKB""" + db_repo_container, common_container, image_search_container = mock_containers + + # Mock the IKB service + mock_ikb_service = Mock() + mock_search_response = MockIKBSearchResponse( + query_id=str(uuid4()), + ikb_id=mock_ikb_info.ikb_id, + ikb_name=mock_ikb_info.name, + algorithm_used='sift', + matches=[ + { + 'reference_id': 'ref_1', + 'match_score': 0.95, + 'confidence': 0.9, + 'metadata': {}, + } + ], + total_images_searched=10, + processing_time_ms=150.5, + ) + mock_ikb_service.search_in_ikb = AsyncMock(return_value=mock_search_response) + image_search_container.ikb_service.override(mock_ikb_service) + + payload = { + 'ikb_id': mock_ikb_info.ikb_id, + 'image_data': sample_image_data, + 'max_results': 5, + 'threshold': 0.8, + } + + response = test_client.post( + f'/floware/ikb/{mock_ikb_info.ikb_id}/search', json=payload + ) + + assert response.status_code == 200 + response_data = response.json() + assert response_data['meta']['status'] == 'success' + assert response_data['data']['ikb_id'] == mock_ikb_info.ikb_id + assert response_data['data']['algorithm_used'] == 'sift' + assert len(response_data['data']['matches']) == 1 + + def test_search_in_ikb_invalid_image_data(self, test_client, mock_ikb_info): + """Test image search with invalid image data""" + invalid_payload = { + 'ikb_id': mock_ikb_info.ikb_id, + 'image_data': 'invalid_base64_data', # Invalid format + 'max_results': 5, + } + + response = test_client.post( + f'/floware/ikb/{mock_ikb_info.ikb_id}/search', json=invalid_payload + ) + assert response.status_code == 422 # Validation error + + def test_search_in_ikb_invalid_max_results( + self, test_client, sample_image_data, mock_ikb_info + ): + """Test image search with invalid max_results parameter""" + invalid_payload = { + 'ikb_id': mock_ikb_info.ikb_id, + 'image_data': sample_image_data, + 'max_results': 150, # Exceeds maximum of 100 + } + + response = test_client.post( + f'/floware/ikb/{mock_ikb_info.ikb_id}/search', json=invalid_payload + ) + assert response.status_code == 422 # Validation 
error + + +class TestIKBDeleteEndpoint: + """Test DELETE operations""" + + def test_delete_ikb_success(self, test_client, mock_containers, mock_ikb_info): + """Test successful IKB deletion""" + db_repo_container, common_container, image_search_container = mock_containers + + # Mock the IKB service + mock_ikb_service = Mock() + mock_ikb_service.delete_ikb = AsyncMock(return_value=True) + image_search_container.ikb_service.override(mock_ikb_service) + + response = test_client.delete(f'/floware/ikb/{mock_ikb_info.ikb_id}') + + assert response.status_code == 200 + response_data = response.json() + assert response_data['meta']['status'] == 'success' + assert 'deleted successfully' in response_data['data']['message'] + + def test_delete_ikb_not_found(self, test_client, mock_containers): + """Test IKB deletion when IKB doesn't exist""" + db_repo_container, common_container, image_search_container = mock_containers + + # Mock the IKB service to return False (not found) + mock_ikb_service = Mock() + mock_ikb_service.delete_ikb = AsyncMock(return_value=False) + image_search_container.ikb_service.override(mock_ikb_service) + + fake_id = str(uuid4()) + response = test_client.delete(f'/floware/ikb/{fake_id}') + + assert response.status_code == 404 + response_data = response.json() + assert response_data['meta']['status'] == 'failure' + assert f'IKB with ID {fake_id} not found' in response_data['meta']['error'] + + +class TestEndpointIntegration: + """Integration tests for complete workflows""" + + def test_complete_ikb_lifecycle( + self, test_client, mock_containers, sample_ikb_data, sample_image_data + ): + """Test complete IKB lifecycle: create -> add image -> search -> delete""" + db_repo_container, common_container, image_search_container = mock_containers + + # Create a mock IKB info that serializes properly + mock_ikb_info = MockIKBInfo( + ikb_id=str(uuid4()), + name=sample_ikb_data['name'], + description=sample_ikb_data['description'], + ikb_type=IKBType.GOLD_MATCHING, + algorithm_type=AlgorithmType.SIFT, + status=IKBStatus.ACTIVE, + image_count=0, + created_at=datetime.now(), + updated_at=datetime.now(), + config=sample_ikb_data['config'], + ) + + # Mock the IKB service for all operations + mock_ikb_service = Mock() + mock_ikb_service.create_ikb = AsyncMock(return_value=mock_ikb_info) + mock_ikb_service.add_image_to_ikb = AsyncMock( + return_value={'status': 'success', 'reference_id': 'ref_123'} + ) + mock_ikb_service.search_in_ikb = AsyncMock( + return_value=MockIKBSearchResponse( + query_id=str(uuid4()), + ikb_id=mock_ikb_info.ikb_id, + ikb_name=mock_ikb_info.name, + algorithm_used='sift', + matches=[], + total_images_searched=1, + processing_time_ms=100.0, + ) + ) + mock_ikb_service.delete_ikb = AsyncMock(return_value=True) + + image_search_container.ikb_service.override(mock_ikb_service) + + # 1. Create IKB + create_response = test_client.post('/floware/ikb/create', json=sample_ikb_data) + assert create_response.status_code == 201 + created_ikb = create_response.json()['data'] + ikb_id = created_ikb['ikb_id'] + + # 2. Add image to IKB + add_payload = { + 'ikb_id': ikb_id, + 'image_data': sample_image_data, + 'reference_id': 'test_ref_123', + } + add_response = test_client.post(f'/floware/ikb/{ikb_id}/add', json=add_payload) + assert add_response.status_code == 201 + + # 3. 
Search in IKB + search_payload = { + 'ikb_id': ikb_id, + 'image_data': sample_image_data, + 'max_results': 5, + } + search_response = test_client.post( + f'/floware/ikb/{ikb_id}/search', json=search_payload + ) + assert search_response.status_code == 200 + + # 4. Delete IKB + delete_response = test_client.delete(f'/floware/ikb/{ikb_id}') + assert delete_response.status_code == 200 + + def test_error_handling_consistency(self, test_client, mock_containers): + """Test that error responses are consistent across endpoints""" + db_repo_container, common_container, image_search_container = mock_containers + + # Mock service to raise an exception + mock_ikb_service = Mock() + mock_ikb_service.get_ikb = AsyncMock(side_effect=Exception('Database error')) + image_search_container.ikb_service.override(mock_ikb_service) + + fake_id = str(uuid4()) + + # Since the controller doesn't handle exceptions, this will result in a 500 error + # We need to catch the exception that will be raised by the test client + with pytest.raises(Exception) as exc_info: + test_client.get(f'/floware/ikb/{fake_id}') + + # Verify that the exception contains our expected error message + assert 'Database error' in str(exc_info.value) + + +if __name__ == '__main__': + pytest.main([__file__, '-v']) diff --git a/wavefront/server/modules/image_search_module/tests/test_ikb_create_upload.py b/wavefront/server/modules/image_search_module/tests/test_ikb_create_upload.py new file mode 100644 index 00000000..9541bfe2 --- /dev/null +++ b/wavefront/server/modules/image_search_module/tests/test_ikb_create_upload.py @@ -0,0 +1,334 @@ +# import asyncio +# import sys +# import base64 +# from pathlib import Path +# import pytest + + +# from image_search_module.services.ikb_service import IKBService +# from image_search_module.services.image_matching_service import ImageMatchingService +# from image_search_module.services.reference_image_service import ReferenceImageService +# from image_search_module.services.algorithm_service import AlgorithmService +# from image_search_module.services.algorithm_factory import AlgorithmFactory +# from image_search_module.repositories.sift_features_repository import ( +# SIFTFeaturesRepository, +# ) +# from image_search_module.repositories.ikb_repository import IKBRepository +# from image_search_module.algorithms.base import AlgorithmType + +# # from .db_setup import setup_test_database +# from image_search_module.models.ikb_models import ( +# CreateIKBRequest, +# IKBImageAddRequest, +# IKBType, +# IKBStatus, +# ) +# from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +# from db_repo_module.models.image_search_models import ( +# ReferenceImageFeatures, +# SIFTFeatures, +# ) +# from db_repo_module.models.ikb_models import ImageKnowledgeBase + +# import logging + + +# logging.basicConfig(level=logging.INFO) +# logger = logging.getLogger(__name__) + + +# class MockCloudStorage: +# """Mock cloud storage for testing""" + +# def __init__(self): +# self.files = {} + +# async def save_file(self, file_path: str, file_data: bytes) -> str: +# """Save file and return URL""" +# self.files[file_path] = file_data +# return f'mock://storage/{file_path}' + +# async def save_small_file( +# self, file_content: bytes, bucket_name: str, key: str +# ) -> str: +# """Save small file and return URL - matches the expected signature""" +# self.files[key] = file_content +# return f'https://mock-bucket.com/{key}' + + +# async def setup_services(): +# """Set up all required services for testing""" +# 
logger.info('Setting up services...')
+
+#     # Setup database with tables
+#     db_client = await setup_test_database()
+
+#     # Create repositories
+#     features_repository = SQLAlchemyRepository(ReferenceImageFeatures, db_client)
+#     sift_features_repository = SIFTFeaturesRepository(SIFTFeatures, db_client)
+
+#     # Create IKB repository
+#     ikb_repository_db = SQLAlchemyRepository(ImageKnowledgeBase, db_client)
+#     ikb_repository = IKBRepository(ikb_repository_db)
+
+#     # Create services
+#     algorithm_factory = AlgorithmFactory()
+#     algorithm_service = AlgorithmService(algorithm_factory)
+#     cloud_storage = MockCloudStorage()
+#     reference_image_service = ReferenceImageService(
+#         cloud_storage_manager=cloud_storage,
+#         features_repository=features_repository,
+#         sift_features_repository=sift_features_repository,
+#         algorithm_service=algorithm_service,
+#         bucket_name='test-bucket',
+#     )
+#     image_matching_service = ImageMatchingService(
+#         algorithm_factory=algorithm_factory,
+#         reference_service=reference_image_service,
+#         active_algorithm_type=AlgorithmType.SIFT,
+#         algorithm_config={'sift': {'max_features': 1000}},
+#     )
+#     ikb_service = IKBService(
+#         image_matching_service=image_matching_service,
+#         reference_image_service=reference_image_service,
+#         ikb_repository=ikb_repository,
+#     )
+
+#     return ikb_service, db_client
+
+
+# def image_to_base64_data_url(image_path: str) -> str:
+#     """Convert image file to base64 data URL"""
+#     with open(image_path, 'rb') as image_file:
+#         image_data = image_file.read()
+#     base64_data = base64.b64encode(image_data).decode('utf-8')
+#     return f'data:image/png;base64,{base64_data}'
+
+
+# @pytest.mark.skip(reason='Skipping')
+# async def test_create_ikb_and_add_images():
+#     """Test creating an IKB and adding multiple images to it"""
+#     logger.info('๐Ÿงช Test: Create IKB and add Images')
+
+#     ikb_service, db_client = await setup_services()
+
+#     try:
+#         # Step 1: Create IKB
+#         logger.info(' Step 1: Creating IKB...')
+#         create_request = CreateIKBRequest(
+#             name='Gold Image Matching IKB',
+#             description='Test IKB for gold image matching and analysis',
+#             ikb_type=IKBType.GOLD_MATCHING,
+#             algorithm_type=AlgorithmType.SIFT,
+#             config={'threshold': 0.8, 'max_features': 1000},
+#         )
+
+#         ikb_info = await ikb_service.create_ikb(create_request)
+#         logger.info(f'โœ… IKB created: {ikb_info.ikb_id}')
+#         logger.info(f'   Name: {ikb_info.name}')
+#         logger.info(f'   Type: {ikb_info.ikb_type}')
+#         logger.info(f'   Algorithm: {ikb_info.algorithm_type}')
+#         logger.info(f'   Status: {ikb_info.status}')
+#         logger.info(f'   Image Count: {ikb_info.image_count}')
+
+#         # Step 2: add multiple images using real test images
+#         logger.info('๐Ÿ“ค Step 2: adding images...')
+#         test_images_dir = Path(__file__).parent / 'test_images'
+
+#         # Use the actual test images
+#         test_images = [
+#             {'name': 'image1.png', 'description': 'Test image 1'},
+#             {'name': 'image2.png', 'description': 'Test image 2'},
+#             {'name': 'image3.png', 'description': 'Test image 3'},
+#         ]
+
+#         added_images = []
+#         for i, img_info in enumerate(test_images, 1):
+#             logger.info(f"   adding image {i}/3: {img_info['name']}")
+
+#             # Get the full path to the test image
+#             image_path = test_images_dir / img_info['name']
+
+#             add_request = IKBImageAddRequest(
+#                 ikb_id=ikb_info.ikb_id,
+#                 image_data=image_to_base64_data_url(str(image_path)),
+#                 reference_id=f'test_image_{i}',
+#                 metadata={
+#                     'description': img_info['description'],
+#                     'image_file': img_info['name'],
+#                 },
+#             )
+
+#             result = await ikb_service.add_image_to_ikb(add_request)
+#             
added_images.append(result) +# logger.info(f" โœ… added: {result['reference_id']}") # Use correct key + +# # Step 3: Verify IKB properties +# logger.info('๐Ÿ” Step 3: Verifying IKB properties...') +# updated_ikb = await ikb_service.get_ikb(ikb_info.ikb_id) + +# assert updated_ikb is not None, 'IKB should exist' +# assert ( +# updated_ikb.image_count == 3 +# ), f'Expected 3 images, got {updated_ikb.image_count}' +# assert ( +# updated_ikb.status == IKBStatus.ACTIVE +# ), f'Expected ACTIVE status, got {updated_ikb.status}' + +# logger.info('โœ… IKB verification passed:') +# logger.info(f' - Image count: {updated_ikb.image_count}') +# logger.info(f' - Status: {updated_ikb.status}') +# logger.info(f' - Created at: {updated_ikb.created_at}') +# logger.info(f' - Updated at: {updated_ikb.updated_at}') + +# # Step 4: List all IKBs +# logger.info('๐Ÿ“‹ Step 4: Listing all IKBs...') +# all_ikbs = await ikb_service.list_ikbs() +# logger.info(f' Found {len(all_ikbs)} IKB(s)') +# for ikb in all_ikbs: +# logger.info(f' - {ikb.name} ({ikb.ikb_id}): {ikb.image_count} images') + +# logger.info('๐ŸŽ‰ Test completed successfully!') +# return ikb_info.ikb_id, added_images + +# except Exception as e: +# logger.error(f'โŒ Test failed: {e}') +# import traceback + +# traceback.print_exc() +# raise +# finally: +# await db_client.close() + + +# @pytest.mark.skip(reason='Skipping') +# async def test_ikb_search_with_query_image(): +# """Test searching within an IKB using the query image""" +# logger.info(' Test: IKB Search with Query Image') + +# ikb_service, db_client = await setup_services() + +# try: +# # Step 1: Create IKB +# logger.info(' Step 1: Creating IKB...') +# create_request = CreateIKBRequest( +# name='Photo Matching IKB', +# description='Test IKB for photo matching and similarity search', +# ikb_type=IKBType.PHOTO_MATCHING, +# algorithm_type=AlgorithmType.SIFT, +# config={'threshold': 0.7, 'max_features': 1000}, +# ) + +# ikb_info = await ikb_service.create_ikb(create_request) +# logger.info(f'โœ… Created IKB: {ikb_info.name} (ID: {ikb_info.ikb_id})') + +# # Step 2: add reference images using real test images +# logger.info('๐Ÿ“ค Step 2: adding reference images...') +# test_images_dir = Path(__file__).parent / 'test_images' + +# # add the reference images +# reference_images = [] +# for i, image_name in enumerate(['image1.png', 'image2.png', 'image3.png'], 1): +# image_path = test_images_dir / image_name + +# add_request = IKBImageAddRequest( +# ikb_id=ikb_info.ikb_id, +# image_data=image_to_base64_data_url(str(image_path)), +# reference_id=f'ref-photo-{i:03d}', +# metadata={'category': f'photo_{i}', 'add_order': i}, +# ) + +# add_result = await ikb_service.add_image_to_ikb(add_request) +# reference_images.append(add_result['reference_id']) # Use correct key +# logger.info(f"โœ… added reference {i}: {add_result['reference_id']}") + +# # Verify all images added +# updated_ikb = await ikb_service.get_ikb(ikb_info.ikb_id) +# assert updated_ikb.image_count == 3 +# logger.info(f'โœ… IKB has {updated_ikb.image_count} reference images') + +# # Step 3: Search with query image +# logger.info('๐Ÿ” Step 3: Searching with query image...') +# query_image_path = test_images_dir / 'query.png' + +# from image_search_module.models.ikb_models import IKBSearchRequest + +# search_request = IKBSearchRequest( +# ikb_id=ikb_info.ikb_id, +# image_data=image_to_base64_data_url(str(query_image_path)), +# max_results=5, +# threshold=0.6, +# ) + +# search_result = await ikb_service.search_in_ikb(search_request) +# 
logger.info(f'โœ… Search completed: {len(search_result.matches)} matches found') + +# # Verify search results +# assert search_result.ikb_id == ikb_info.ikb_id +# assert search_result.ikb_name == ikb_info.name +# assert search_result.algorithm_used == 'sift' +# assert search_result.total_images_searched == 3 +# assert len(search_result.matches) > 0 + +# # Step 4: Analyze search results +# logger.info(' Step 4: Analyzing search results...') +# logger.info(f' Query ID: {search_result.query_id}') +# logger.info(f' IKB: {search_result.ikb_name}') +# logger.info(f' Algorithm: {search_result.algorithm_used}') +# logger.info(f' Total images searched: {search_result.total_images_searched}') +# logger.info(f' Processing time: {search_result.processing_time_ms:.2f}ms') +# logger.info(f' Matches found: {len(search_result.matches)}') + +# # Log detailed match information +# for i, match in enumerate(search_result.matches): +# logger.info(f' Match {i+1}:') +# logger.info(f" - Reference ID: {match['reference_id']}") +# logger.info(f" - Match Score: {match['match_score']:.4f}") +# logger.info(f" - Is Match: {match['is_match']}") +# logger.info(f" - Confidence: {match['confidence']:.4f}") +# logger.info(f" - Processing Time: {match['processing_time_ms']:.2f}ms") + +# logger.info(' Search test completed successfully!') +# logger.info('๐Ÿ“Š Summary:') +# logger.info(f' - IKB: {ikb_info.name}') +# logger.info(f' - Reference Images: {len(reference_images)}') +# logger.info(' - Query Image: query.png') +# logger.info(f' - Matches Found: {len(search_result.matches)}') +# logger.info( +# f" - Best Match Score: {max(match['match_score'] for match in search_result.matches):.4f}" +# ) + +# return ikb_info.ikb_id, search_result + +# except Exception as e: +# logger.error(f'โŒ Test failed: {e}') +# import traceback + +# traceback.print_exc() +# raise +# finally: +# await db_client.close() + + +# async def main(): +# """Main test function""" +# try: +# # Test 1: Create IKB and add images +# # logger.info('๐Ÿš€ Starting IKB Create and add Test') +# # ikb_id, added_images = await test_create_ikb_and_add_images() +# # logger.info(f'โœ… Create/add test passed! IKB ID: {ikb_id}') +# # logger.info(f'โœ… added {len(added_images)} images') + +# # Test 2: Search with query image +# logger.info('\n๐Ÿš€ Starting IKB Search Test') +# search_ikb_id, search_result = await test_ikb_search_with_query_image() +# logger.info(f'โœ… Search test passed! 
IKB ID: {search_ikb_id}')
+#         logger.info(f'โœ… Found {len(search_result.matches)} matches')
+
+#     except Exception as e:
+#         logger.error(f'โŒ Test failed: {e}')
+#         sys.exit(1)
+
+
+# if __name__ == '__main__':
+#     asyncio.run(main())
diff --git a/wavefront/server/modules/image_search_module/tests/test_image_controller.py b/wavefront/server/modules/image_search_module/tests/test_image_controller.py
new file mode 100644
index 00000000..585df502
--- /dev/null
+++ b/wavefront/server/modules/image_search_module/tests/test_image_controller.py
@@ -0,0 +1,203 @@
+"""
+Simple test to verify image search module wiring without complex dependencies
+"""
+
+import pytest
+from unittest.mock import Mock, AsyncMock, MagicMock
+from fastapi import FastAPI
+from fastapi.testclient import TestClient
+
+from image_search_module.controllers.image_search_controller import image_search_router
+from image_search_module.image_search_container import ImageSearchContainer
+from image_search_module.algorithms.base import AlgorithmType
+from common_module.common_container import CommonContainer
+from db_repo_module.db_repo_container import DatabaseModuleContainer
+
+
+class MockDbClient:
+    def __init__(self):
+        # Create a mock session factory
+        self.session = MagicMock()
+        # Mock the async context manager behavior
+        mock_session = MagicMock()
+        mock_session.add = Mock()
+        mock_session.commit = AsyncMock()
+        mock_session.refresh = AsyncMock()
+        mock_session.query = Mock()
+        mock_session.get = AsyncMock()
+
+        self.session.return_value.__aenter__ = AsyncMock(return_value=mock_session)
+        self.session.return_value.__aexit__ = AsyncMock(return_value=None)
+
+
+@pytest.fixture
+def mock_containers():
+    """Setup mock containers for testing"""
+    # Mock database container
+    db_repo_container = DatabaseModuleContainer()
+    mock_db_client = MockDbClient()
+    db_repo_container.db_client.override(mock_db_client)
+
+    # Mock common container
+    common_container = CommonContainer()
+    mock_cache_manager = Mock()
+    mock_cache_manager.get_str.return_value = (
+        '{"user_id": "test_user", "session_id": "test_session"}'
+    )
+    mock_cache_manager.add = Mock()
+    common_container.cache_manager.override(mock_cache_manager)
+
+    # Mock image search container
+    mock_cloud_storage_manager = Mock()
+    mock_cloud_storage_manager.save_file = AsyncMock(
+        return_value='mock://storage/test.jpg'
+    )
+    mock_cloud_storage_manager.get_file = AsyncMock(return_value=b'mock_data')
+
+    image_search_container = ImageSearchContainer(
+        db_client=mock_db_client,
+        cloud_storage_manager=mock_cloud_storage_manager,
+    )
+
+    # Override the problematic providers directly
+    image_search_container.active_algorithm_type.override(AlgorithmType.SIFT)
+
+    # Mock the repositories with proper async methods and correct return types
+    mock_ikb_repository = Mock()
+    # list_ikbs returns an empty list so the listing endpoint has a valid default
+    mock_ikb_repository.list_ikbs = AsyncMock(return_value=[])
+    mock_ikb_repository.get_ikb = AsyncMock(return_value=None)
+    mock_ikb_repository.create_ikb = AsyncMock(return_value=Mock())
+    mock_ikb_repository.delete_ikb = AsyncMock(return_value=True)
+
+    mock_reference_features_repository = Mock()
+    mock_reference_features_repository.create = AsyncMock(return_value=Mock())
+    mock_reference_features_repository.get = AsyncMock(return_value=None)
+
+    mock_sift_features_repository = Mock()
+    mock_sift_features_repository.create = AsyncMock(return_value=Mock())
+    mock_sift_features_repository.get = AsyncMock(return_value=None)
+
+    # Override the repository providers
+    
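# (each override below swaps a real repository for a Mock, so no SQL is
+    # issued while the routes are exercised)
+    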
image_search_container.ikb_repository.override(mock_ikb_repository) + image_search_container.reference_features_repository.override( + mock_reference_features_repository + ) + image_search_container.sift_features_repository.override( + mock_sift_features_repository + ) + + # Mock the services that depend on config + mock_algorithm_factory = Mock() + mock_algorithm_service = Mock() + mock_reference_image_service = Mock() + mock_reference_image_service.add_image_to_ikb = AsyncMock( + return_value={'status': 'success'} + ) + mock_reference_image_service.search_in_ikb = AsyncMock(return_value=Mock()) + + mock_image_matching_service = Mock() + + image_search_container.algorithm_factory.override(mock_algorithm_factory) + image_search_container.algorithm_service.override(mock_algorithm_service) + image_search_container.reference_image_service.override( + mock_reference_image_service + ) + image_search_container.image_matching_service.override(mock_image_matching_service) + + # Wire containers + common_container.wire(packages=['image_search_module.controllers']) + image_search_container.wire(packages=['image_search_module.controllers']) + + yield db_repo_container, common_container, image_search_container + + # Cleanup + common_container.unwire() + image_search_container.unwire() + + +@pytest.fixture +def test_app(mock_containers): + """Create test FastAPI app""" + app = FastAPI() + app.include_router(image_search_router, prefix='/floware') + return app + + +@pytest.fixture +def test_client(test_app): + """Create test client""" + return TestClient(test_app) + + +def test_app_creation(test_app): + """Test that the FastAPI app can be created with the router""" + assert test_app is not None + # Check that routes are registered + routes = [route.path for route in test_app.routes] + assert '/floware/ikb/' in routes + assert '/floware/ikb/create' in routes + + +def test_router_inclusion(test_app): + """Test that the image search router is properly included""" + # Check that the router is included + assert len(test_app.routes) > 0 + + # Check for specific routes + route_paths = [route.path for route in test_app.routes if hasattr(route, 'path')] + expected_paths = [ + '/floware/ikb/', + '/floware/ikb/create', + '/floware/ikb/{ikb_id}', + '/floware/ikb/{ikb_id}/add', + '/floware/ikb/{ikb_id}/search', + ] + + for expected_path in expected_paths: + assert any( + expected_path in path for path in route_paths + ), f'Route {expected_path} not found' + + +def test_container_wiring(mock_containers): + """Test that containers can be wired without errors""" + db_repo_container, common_container, image_search_container = mock_containers + + # Test that containers are properly set up + assert db_repo_container is not None + assert common_container is not None + assert image_search_container is not None + + # Test that services can be accessed + try: + ikb_service = image_search_container.ikb_service() + assert ikb_service is not None + print('โœ… IKB service created successfully') + except Exception as e: + pytest.fail(f'Failed to get ikb_service: {e}') + + +def test_basic_endpoint_access(test_client): + """Test that endpoints are accessible (even if they return errors)""" + # Test GET /floware/ikb/ - should return some response (not 404) + response = test_client.get('/floware/ikb/') + print(f'GET /floware/ikb/ status: {response.status_code}') + # Should not be 404 (route not found) + assert response.status_code != 404, 'Route not found - wiring issue' + + # Test POST /floware/ikb/create - should return some response 
(not 404) + response = test_client.post('/floware/ikb/create', json={}) + print(f'POST /floware/ikb/create status: {response.status_code}') + # Should not be 404 (route not found) + assert response.status_code != 404, 'Route not found - wiring issue' + + +def test_invalid_endpoint_returns_404(test_client): + """Test that invalid endpoints return 404""" + response = test_client.get('/floware/invalid-endpoint') + assert response.status_code == 404 + + +if __name__ == '__main__': + pytest.main([__file__, '-v']) diff --git a/wavefront/server/modules/image_search_module/tests/test_images/local_search.sh b/wavefront/server/modules/image_search_module/tests/test_images/local_search.sh new file mode 100755 index 00000000..1d306079 --- /dev/null +++ b/wavefront/server/modules/image_search_module/tests/test_images/local_search.sh @@ -0,0 +1,150 @@ +#!/bin/bash + +# Simple Image Search Test Script (No Authentication) + +set -e # Exit on any error + +# Configuration +BASE_URL="http://0.0.0.0:8001" +IMAGE_FILE="query.png" +IKB_ID="fc562847-e2cf-4f6e-bd1e-708a7a3be8f8" # Replace with your IKB ID + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +echo -e "${BLUE}=== Image Search Test (No Auth) ===${NC}" +echo "Base URL: $BASE_URL" +echo "Image File: $IMAGE_FILE" +echo "" + +# Step 1: Check if image file exists +if [ ! -f "$IMAGE_FILE" ]; then + echo -e "${RED}Error: Image file '$IMAGE_FILE' not found${NC}" + echo "Available files in test_images directory:" + ls -la "modules/image_search_module/tests/test_images/" 2>/dev/null || echo "Directory not found" + exit 1 +fi + +echo -e "${GREEN}โœ“ Image file found${NC}" +echo "" + +# Step 2: Prepare image data +echo -e "${YELLOW}Step 1: Preparing image data...${NC}" +base64 -i "$IMAGE_FILE" | tr -d '\n' > /tmp/image_base64.txt +echo -e "${GREEN}โœ“ Image converted to base64 ($(wc -c < /tmp/image_base64.txt) characters)${NC}" +echo "" + +# Step 3: Add image to IKB +echo -e "${YELLOW}Step 2: Adding image to IKB...${NC}" + +# Create request payload for adding image +cat > /tmp/add_payload.json << EOF +{ + "image_data": "data:image/png;base64,$(cat /tmp/image_base64.txt)", + "reference_id": "test_reference_$(date +%s)", + "metadata": { + "description": "Test image for no-auth script", + "category": "test", + "source": "no_auth_test", + "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)" + } +} +EOF + +echo "Request payload size: $(wc -c < /tmp/add_payload.json) characters" + +ADD_RESPONSE=$(curl -s -X POST \ + -H "Content-Type: application/json" \ + -d @/tmp/add_payload.json \ + "$BASE_URL/floware/ikb/$IKB_ID/add") + +echo "Add Image Response: $ADD_RESPONSE" +echo "" + +# Check if add was successful +ADD_STATUS=$(echo "$ADD_RESPONSE" | jq -r '.meta.status // "unknown"') +if [ "$ADD_STATUS" = "success" ]; then + echo -e "${GREEN}โœ“ Successfully added image to IKB${NC}" + + # Extract reference ID for verification + REFERENCE_ID=$(echo "$ADD_RESPONSE" | jq -r '.data.reference_id // "unknown"') + echo "Reference ID: $REFERENCE_ID" +else + echo -e "${RED}โœ— Failed to add image to IKB${NC}" + echo "Error: $(echo "$ADD_RESPONSE" | jq -r '.meta.error // "Unknown error"')" + # Continue to search anyway to test the search endpoint +fi + +echo "" + +# Step 4: Search for the same image +echo -e "${YELLOW}Step 3: Searching for the same image...${NC}" + +# Create search payload +cat > /tmp/search_payload.json << EOF +{ + "image_data": "data:image/png;base64,$(cat /tmp/image_base64.txt)", + "max_results": 
5, + "threshold": 0.7 +} +EOF + +echo "Search payload size: $(wc -c < /tmp/search_payload.json) characters" + +SEARCH_RESPONSE=$(curl -s -X POST \ + -H "Content-Type: application/json" \ + -d @/tmp/search_payload.json \ + "$BASE_URL/floware/ikb/$IKB_ID/search") + +echo "Search Response: $SEARCH_RESPONSE" +echo "" + +# Check search results +SEARCH_STATUS=$(echo "$SEARCH_RESPONSE" | jq -r '.meta.status // "unknown"') +if [ "$SEARCH_STATUS" = "success" ]; then + echo -e "${GREEN}โœ“ Search completed successfully${NC}" + + # Extract and display match count + MATCH_COUNT=$(echo "$SEARCH_RESPONSE" | jq -r '.data.matches | length // 0') + echo "Number of matches found: $MATCH_COUNT" + + if [ "$MATCH_COUNT" -gt 0 ]; then + echo -e "${GREEN}โœ“ Found matching images!${NC}" + + # Display match details + echo "Match details:" + echo "$SEARCH_RESPONSE" | jq '.data.matches[] | {reference_id: .reference_id, match_score: .match_score, confidence: .confidence, is_match: .is_match}' + + # Check if our added image is in the results + if [ -n "$REFERENCE_ID" ] && [ "$REFERENCE_ID" != "unknown" ]; then + FOUND_OUR_IMAGE=$(echo "$SEARCH_RESPONSE" | jq -r --arg ref_id "$REFERENCE_ID" '.data.matches[] | select(.reference_id == $ref_id) | .reference_id // empty') + if [ -n "$FOUND_OUR_IMAGE" ]; then + echo -e "${GREEN}โœ“ Our added image was found in search results!${NC}" + else + echo -e "${YELLOW}โš  Our added image was not found in search results${NC}" + fi + fi + else + echo -e "${YELLOW}โš  No matches found${NC}" + fi +else + echo -e "${RED}โœ— Search failed${NC}" + echo "Error: $(echo "$SEARCH_RESPONSE" | jq -r '.meta.error // "Unknown error"')" +fi + +echo "" + +# Step 5: Test IKB info endpoint +echo -e "${YELLOW}Step 4: Testing IKB info endpoint...${NC}" +IKB_INFO_RESPONSE=$(curl -s -X GET "$BASE_URL/floware/ikb/$IKB_ID") +echo "IKB Info Response: $IKB_INFO_RESPONSE" + +# Clean up temporary files +rm -f /tmp/image_base64.txt /tmp/add_payload.json /tmp/search_payload.json + +echo "" +echo -e "${BLUE}=== Test Complete ===${NC}" diff --git a/wavefront/server/modules/image_search_module/tests/test_images/staging_search.sh b/wavefront/server/modules/image_search_module/tests/test_images/staging_search.sh new file mode 100755 index 00000000..f059b66a --- /dev/null +++ b/wavefront/server/modules/image_search_module/tests/test_images/staging_search.sh @@ -0,0 +1,208 @@ +#!/bin/bash + +# Image Search API Testing Script for Staging +# Usage: ./test_image_search_staging.sh + +set -e # Exit on any error + +# Configuration +STAGING_BASE_URL="https://staging.rootflo.ai" +AUTH_EMAIL="" # Replace with your email +AUTH_PASSWORD="" # Replace with your password +IMAGE_FILE="image1.png" # Replace with your image file path +IKB_ID="fc562847-e2cf-4f6e-bd1e-708a7a3be8f8" # Replace with your IKB ID + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +echo -e "${BLUE}=== Image Search API Testing Script for Staging ===${NC}" +echo "Base URL: $STAGING_BASE_URL" +echo "" + +# Function to make authenticated requests +make_authenticated_request() { + local method=$1 + local endpoint=$2 + local data=$3 + local content_type=${4:-"application/json"} + + if [ -z "$BEARER_TOKEN" ]; then + echo -e "${RED}Error: No bearer token available${NC}" + return 1 + fi + + if [ -n "$data" ]; then + # Check if data is too large for command line (roughly > 1MB) + if [ ${#data} -gt 1000000 ]; then + # Use temporary file for large payloads + local temp_file=$(mktemp) + echo 
"$data" > "$temp_file" + curl -s -X "$method" \ + -H "Content-Type: $content_type" \ + -H "Authorization: Bearer $BEARER_TOKEN" \ + -d @"$temp_file" \ + "$STAGING_BASE_URL$endpoint" + rm -f "$temp_file" + else + # Use direct data for small payloads + curl -s -X "$method" \ + -H "Content-Type: $content_type" \ + -H "Authorization: Bearer $BEARER_TOKEN" \ + -d "$data" \ + "$STAGING_BASE_URL$endpoint" + fi + else + curl -s -X "$method" \ + -H "Authorization: Bearer $BEARER_TOKEN" \ + "$STAGING_BASE_URL$endpoint" + fi +} + +# Step 1: Authenticate and get bearer token +echo -e "${YELLOW}Step 1: Authenticating...${NC}" +AUTH_RESPONSE=$(curl -s -X POST \ + -H "Content-Type: application/json" \ + -d "{\"email\": \"$AUTH_EMAIL\", \"password\": \"$AUTH_PASSWORD\"}" \ + "$STAGING_BASE_URL/floware/v1/authenticate") + +echo "Auth Response: $AUTH_RESPONSE" + +# Extract bearer token from response +BEARER_TOKEN=$(echo "$AUTH_RESPONSE" | jq -r '.data.user.access_token // empty') + +if [ -z "$BEARER_TOKEN" ] || [ "$BEARER_TOKEN" = "null" ]; then + echo -e "${RED}Error: Failed to get bearer token${NC}" + echo "Response: $AUTH_RESPONSE" + exit 1 +fi + +echo -e "${GREEN}โœ“ Successfully authenticated${NC}" +echo "Bearer Token: ${BEARER_TOKEN:0:50}..." +echo "" + +# Step 2: Check if image file exists +if [ ! -f "$IMAGE_FILE" ]; then + echo -e "${RED}Error: Image file '$IMAGE_FILE' not found${NC}" + exit 1 +fi + +echo -e "${YELLOW}Step 2: Preparing image data...${NC}" + +# Convert image to base64 +BASE64_DATA=$(base64 -i "$IMAGE_FILE" | tr -d '\n') +IMAGE_DATA_URL="data:image/png;base64,${BASE64_DATA}" + +echo -e "${GREEN}โœ“ Image converted to base64 (${#BASE64_DATA} characters)${NC}" +echo "" + +# Step 3: Test IKB endpoints +echo -e "${YELLOW}Step 3: Testing IKB endpoints...${NC}" + +# 3a. List all IKBs +echo -e "${BLUE}3a. Listing all IKBs...${NC}" +LIST_RESPONSE=$(make_authenticated_request "GET" "/floware/ikb/") +echo "List IKB Response: $LIST_RESPONSE" +echo "" + +# 3b. Get specific IKB info +echo -e "${BLUE}3b. Getting IKB info for ID: $IKB_ID...${NC}" +IKB_INFO_RESPONSE=$(make_authenticated_request "GET" "/floware/ikb/$IKB_ID") +echo "IKB Info Response: $IKB_INFO_RESPONSE" +echo "" + +# 3c. Add image to IKB +echo -e "${BLUE}3c. Adding image to IKB...${NC}" + +# Create request payload +REQUEST_PAYLOAD=$(cat << EOF +{ + "image_data": "$IMAGE_DATA_URL", + "reference_id": "test_reference_$(date +%s)", + "metadata": { + "description": "Test image for IKB via staging script", + "category": "test", + "source": "staging_test", + "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)" + } +} +EOF +) + +echo "Request payload size: $(echo "$REQUEST_PAYLOAD" | wc -c) characters" + +ADD_RESPONSE=$(make_authenticated_request "POST" "/floware/ikb/$IKB_ID/add" "$REQUEST_PAYLOAD") +echo "Add Image Response: $ADD_RESPONSE" +echo "" + +# 3d. Search in IKB +echo -e "${BLUE}3d. 
Searching in IKB...${NC}" + +SEARCH_PAYLOAD=$(cat << EOF +{ + "image_data": "$IMAGE_DATA_URL", + "max_results": 5, + "threshold": 0.7 +} +EOF +) + +SEARCH_RESPONSE=$(make_authenticated_request "POST" "/floware/ikb/$IKB_ID/search" "$SEARCH_PAYLOAD") +echo "Search Response: $SEARCH_RESPONSE" +echo "" + +# Step 4: Create a new IKB (optional test) +echo -e "${YELLOW}Step 4: Testing IKB creation...${NC}" + +CREATE_IKB_PAYLOAD=$(cat << EOF +{ + "name": "Test IKB $(date +%Y%m%d_%H%M%S)", + "description": "Test IKB created via staging script", + "ikb_type": "photo_matching", + "algorithm_type": "sift", + "config": { + "max_keypoints": 5000, + "match_threshold": 0.7 + } +} +EOF +) + +CREATE_RESPONSE=$(make_authenticated_request "POST" "/floware/ikb/create" "$CREATE_IKB_PAYLOAD") +echo "Create IKB Response: $CREATE_RESPONSE" + +# Extract new IKB ID if creation was successful +NEW_IKB_ID=$(echo "$CREATE_RESPONSE" | jq -r '.data.ikb_id // empty') + +if [ -n "$NEW_IKB_ID" ] && [ "$NEW_IKB_ID" != "null" ]; then + echo -e "${GREEN}โœ“ Successfully created new IKB with ID: $NEW_IKB_ID${NC}" + + # Test adding image to new IKB + echo -e "${BLUE}Adding image to newly created IKB...${NC}" + ADD_TO_NEW_RESPONSE=$(make_authenticated_request "POST" "/floware/ikb/$NEW_IKB_ID/add" "$REQUEST_PAYLOAD") + echo "Add to New IKB Response: $ADD_TO_NEW_RESPONSE" + echo "" + + # Clean up: Delete the test IKB + echo -e "${BLUE}Cleaning up: Deleting test IKB...${NC}" + DELETE_RESPONSE=$(make_authenticated_request "DELETE" "/floware/ikb/$NEW_IKB_ID") + echo "Delete Response: $DELETE_RESPONSE" +else + echo -e "${YELLOW}โš  IKB creation may have failed or returned unexpected format${NC}" +fi + +echo "" +echo -e "${GREEN}=== Testing Complete ===${NC}" +echo "All API endpoints have been tested successfully!" 
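+# NOTE (added comment): the summary below lists the endpoints this script
+# exercises; failures in individual steps are reported inline above rather
+# than aggregated into this final message.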
+echo "" +echo -e "${BLUE}Summary of tested endpoints:${NC}" +echo "โœ“ POST /floware/v1/authenticate - Authentication" +echo "โœ“ GET /floware/ikb/ - List IKBs" +echo "โœ“ GET /floware/ikb/{ikb_id} - Get IKB info" +echo "โœ“ POST /floware/ikb/{ikb_id}/add - Add image to IKB" +echo "โœ“ POST /floware/ikb/{ikb_id}/search - Search in IKB" +echo "โœ“ POST /floware/ikb/create - Create new IKB" +echo "โœ“ DELETE /floware/ikb/{ikb_id} - Delete IKB" diff --git a/wavefront/server/modules/inference_module/inference_module/__init__.py b/wavefront/server/modules/inference_module/inference_module/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/modules/inference_module/inference_module/controllers/__init__.py b/wavefront/server/modules/inference_module/inference_module/controllers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/modules/inference_module/inference_module/controllers/inference_controller.py b/wavefront/server/modules/inference_module/inference_module/controllers/inference_controller.py new file mode 100644 index 00000000..af8a8b06 --- /dev/null +++ b/wavefront/server/modules/inference_module/inference_module/controllers/inference_controller.py @@ -0,0 +1,244 @@ +from common_module.common_container import CommonContainer +from common_module.log.logger import logger +from common_module.response_formatter import ResponseFormatter +from db_repo_module.models.model_schema import ModelSchema +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from dependency_injector.wiring import Provide, inject +from fastapi import APIRouter, Depends, File, Form, UploadFile, status, Query +from fastapi.responses import JSONResponse +from inference_module.inference_container import InferenceContainer +from flo_cloud.cloud_storage import CloudStorageManager +from db_repo_module.cache.cache_manager import CacheManager +from sqlalchemy import update, select, Result +import httpx +import uuid + + +inference_router = APIRouter() + + +async def handle_database_error(session, error_msg: str, error) -> JSONResponse: + """Handle database errors and return appropriate response.""" + await session.rollback() + logger.error(f'{error_msg} with error as {str(error)}') + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=ResponseFormatter.buildErrorResponse(str(error)), + ) + + +@inference_router.post('/v1/model-repository/model') +@inject +async def model_loading( + model_type: str = Form(..., description='The type of the model'), + model_file: UploadFile = File(..., description='The model file to be uploaded'), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + model_inference_repository: SQLAlchemyRepository[ModelSchema] = Depends( + Provide[InferenceContainer.model_inference_repository] + ), + cloud_storage_manager: CloudStorageManager = Depends( + Provide[InferenceContainer.cloud_storage_manager] + ), + config: InferenceContainer.config.provided = Depends( + Provide[InferenceContainer.config] + ), + cache_manager: CacheManager = Depends(Provide[CommonContainer.cache_manager]), +): + provider = config['cloud_config']['cloud_provider'] + model_storage_bucket = ( + config['gcp']['model_storage_bucket'] + if provider.lower() == 'gcp' + else config['aws']['model_storage_bucket'] + ) + if cache_manager.get_str(f"model_name_key_{model_file.filename.split('.')[0]}"): + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, 
+            content=response_formatter.buildErrorResponse(
+                'Model details already found in the Model inference table'
+            ),
+        )
+    else:
+        cache_manager.add(
+            f"model_name_key_{model_file.filename.split('.')[0]}",
+            model_file.filename.split('.')[0],
+            600,
+        )
+    filepath = f'inference/model/{model_file.filename}'
+    async with model_inference_repository.session() as session:
+        model_record = ModelSchema(
+            model_name=model_file.filename.split('.')[0],
+            model_path=filepath,
+            model_type=model_type,
+        )
+        session.add(model_record)
+        await session.flush()
+        model_id = model_record.model_id
+        filename = model_file.filename.replace(' ', '_')
+        gcs_file_name = f'model_{model_id}/{filename}'
+        file_content = await model_file.read()
+        cloud_storage_manager.save_large_file(
+            file_content,
+            model_storage_bucket,
+            gcs_file_name,
+        )
+        await session.commit()
+        # Re-point model_path at the uploaded object's key now that model_id is known
+        await session.execute(
+            update(ModelSchema)
+            .where(ModelSchema.model_id == model_id)
+            .values(model_path=gcs_file_name)
+        )
+        await session.commit()
+        return JSONResponse(
+            status_code=status.HTTP_201_CREATED,
+            content=response_formatter.buildSuccessResponse(
+                {
+                    'message': 'Created the model inference table and inserted the model details successfully',
+                    'model_id': str(model_id),
+                }
+            ),
+        )
+
+
+@inference_router.post('/v1/model-repository/model/{model_id}/infer')
+@inject
+async def redirect_model_inference_api(
+    model_id: str,
+    data: dict,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    config: InferenceContainer.config.provided = Depends(
+        Provide[InferenceContainer.config]
+    ),
+    model_inference_repository: SQLAlchemyRepository[ModelSchema] = Depends(
+        Provide[InferenceContainer.model_inference_repository]
+    ),
+):
+    model_info = await model_inference_repository.find_one(model_id=model_id)
+    if not model_info:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                'Model details not found in the Model inference table'
+            ),
+        )
+    data['model_info'] = ModelSchema.to_dict(model_info)
+    internal_api_url = f"{config['model']['inference_service_url']}/inference/v1/model-repository/model/{model_id}/infer"
+    async with httpx.AsyncClient(
+        timeout=httpx.Timeout(60.0, connect=30.0),
+        limits=httpx.Limits(
+            max_keepalive_connections=20, max_connections=100, keepalive_expiry=60
+        ),
+    ) as client:
+        response = await client.post(internal_api_url, json=data)
+
+    # Log error if response status is not successful
+    if response.status_code != 201:
+        error_response_text = (
+            response.text[:1000]
+            if hasattr(response, 'text')
+            else 'No response text available'
+        )
+        logger.error(
+            f'Failed to call internal inference API. 
URL: {internal_api_url}, ' + f'Model ID: {model_id}, Status Code: {response.status_code}, ' + f'Error Response: {error_response_text}' + ) + # Return error response when internal API fails + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Internal inference API returned error: {error_response_text}' + ), + ) + + logger.debug(f'The response value is {response.json()}') + response = response.json().get('data', {}).get('results', {}) + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse({'data': response}), + ) + + +@inference_router.get('/v1/model-repository/model') +@inject +async def list_all_models( + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + limit: int = Query( + 10, ge=1, le=100, description='The maximum number of items to return' + ), + model_inference_repository: SQLAlchemyRepository[ModelSchema] = Depends( + Provide[InferenceContainer.model_inference_repository] + ), +): + async with model_inference_repository.session() as session: + query = select(ModelSchema).slice(0, limit) + results: Result = await session.execute(query) + resources = results.scalars().all() + data = [res.to_dict() for res in resources] + return JSONResponse( + content=response_formatter.buildSuccessResponse({'data': data}), + status_code=status.HTTP_200_OK, + ) + + +@inference_router.get('/v1/model-repository/model/{model_id}') +@inject +async def list_model_from_id( + model_id: uuid.UUID, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + model_inference_repository: SQLAlchemyRepository[ModelSchema] = Depends( + Provide[InferenceContainer.model_inference_repository] + ), +): + model_info = await model_inference_repository.find_one(model_id=model_id) + if not model_info: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Model details not found in the Model inference table' + ), + ) + return JSONResponse( + content=response_formatter.buildSuccessResponse( + {'data': ModelSchema.to_dict(model_info)} + ), + status_code=status.HTTP_200_OK, + ) + + +@inference_router.delete('/v1/model-repository/model/{model_id}') +@inject +async def delete_model_from_id( + model_id: uuid.UUID, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + model_inference_repository: SQLAlchemyRepository[ModelSchema] = Depends( + Provide[InferenceContainer.model_inference_repository] + ), +): + model_info = await model_inference_repository.find_one(model_id=model_id) + if not model_info: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Model details not found in the Model inference table' + ), + ) + await model_inference_repository.delete_all(model_id=model_id) + return JSONResponse( + status_code=status.HTTP_204_NO_CONTENT, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Deleted the Model Inference record successfully', + 'model_id': str(model_id), + } + ), + ) diff --git a/wavefront/server/modules/inference_module/inference_module/inference_container.py b/wavefront/server/modules/inference_module/inference_module/inference_container.py new file mode 100644 index 00000000..4ba8c2ac --- /dev/null +++ b/wavefront/server/modules/inference_module/inference_module/inference_container.py @@ -0,0 +1,20 @@ 
+from db_repo_module.models.model_schema import ModelSchema
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from dependency_injector import containers
+from dependency_injector import providers
+from flo_cloud.cloud_storage import CloudStorageManager
+
+
+class InferenceContainer(containers.DeclarativeContainer):
+    config = providers.Configuration(ini_files=['config.ini'])
+    db_client = providers.Dependency()
+    cache_manager = providers.Dependency()
+    model_inference_repository = providers.Singleton(
+        SQLAlchemyRepository[ModelSchema],
+        model=ModelSchema,
+        db_client=db_client,
+    )
+
+    cloud_storage_manager = providers.Singleton(
+        CloudStorageManager, provider=config.cloud_config.cloud_provider
+    )
diff --git a/wavefront/server/modules/inference_module/pyproject.toml b/wavefront/server/modules/inference_module/pyproject.toml
new file mode 100644
index 00000000..c5ca5c89
--- /dev/null
+++ b/wavefront/server/modules/inference_module/pyproject.toml
@@ -0,0 +1,40 @@
+[project]
+name = "inference-module"
+version = "0.1.0"
+description = "Inference module for wavefront"
+authors = [
+    { name = "rootflo engineering", email = "engineering@rootflo.ai" }
+]
+requires-python = ">=3.11"
+
+dependencies = [
+    "common-module",
+    "db-repo-module",
+    "flo-cloud",
+]
+
+[tool.uv.sources]
+common-module = { workspace = true }
+db-repo-module = { workspace = true }
+flo-cloud = { workspace = true }
+
+[dependency-groups]
+dev = [
+    "pytest>=8.3.3,<9.0.0",
+    "pytest-asyncio>=0.24.0,<1.0.0",
+    "asyncpg>=0.30.0,<1.0.0",
+    "testing-postgresql>=1.3.0,<2.0.0"
+]
+
+[tool.pytest.ini_options]
+asyncio_mode = "auto"
+
+[tool.uv]
+package = true
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["inference_module"]
diff --git a/wavefront/server/modules/inference_module/tests/conftest.py b/wavefront/server/modules/inference_module/tests/conftest.py
new file mode 100644
index 00000000..e3e1addf
--- /dev/null
+++ b/wavefront/server/modules/inference_module/tests/conftest.py
@@ -0,0 +1,179 @@
+import json
+from unittest.mock import Mock, AsyncMock
+from uuid import uuid4
+
+from auth_module.auth_container import AuthContainer
+from common_module.common_container import CommonContainer
+from db_repo_module.database.base import Base
+from db_repo_module.db_repo_container import DatabaseModuleContainer
+from fastapi import FastAPI
+from fastapi.testclient import TestClient
+import pytest
+from sqlalchemy.ext.asyncio import async_sessionmaker
+from sqlalchemy.ext.asyncio import create_async_engine
+import testing.postgresql
+from user_management_module.authorization.require_auth import RequireAuthMiddleware
+from user_management_module.user_container import UserContainer
+from inference_module.controllers.inference_controller import inference_router
+from inference_module.inference_container import InferenceContainer
+from dependency_injector import providers
+
+
+class MockDbClient:
+    def __init__(self, engine, session_factory):
+        self._engine = engine
+        self.session = session_factory
+
+
+@pytest.fixture
+async def test_engine():
+    with testing.postgresql.Postgresql() as postgresql:
+        database_url = postgresql.url()
+
+        async_database_url = database_url.replace(
+            'postgresql://', 'postgresql+psycopg://'
+        )
+
+        engine = create_async_engine(async_database_url)
+
+        async with engine.begin() as conn:
+            await conn.run_sync(Base.metadata.create_all)
+
+        yield engine
+
+        async with engine.begin() as conn:
+            await 
conn.run_sync(Base.metadata.drop_all) + await engine.dispose() + + +@pytest.fixture +async def test_session(test_engine): + async_session = async_sessionmaker(autocommit=False, bind=test_engine) + yield async_session + + +@pytest.fixture +def test_user_id(): + """Fixture to provide a consistent test user ID.""" + return str(uuid4()) + + +@pytest.fixture +def test_session_id(): + """Fixture to provide a consistent test session ID.""" + return str(uuid4()) + + +@pytest.fixture +def setup_containers(test_engine, test_session, test_user_id, test_session_id): + db_repo_container = DatabaseModuleContainer() + mock_db_client = MockDbClient(test_engine, test_session) + db_repo_container.db_client.override(mock_db_client) + user_container = UserContainer() + common_container = CommonContainer() + + cache_manager_mock = Mock() + # For session data + cache_manager_mock.get_str.return_value = json.dumps( + {'user_id': test_user_id, 'device_info': 'Mozilla/5.0'} + ) + # For reset password + cache_manager_mock.get_str.side_effect = ( + lambda key: test_user_id + if key == 'mock_reset_code' + else json.dumps({'user_id': test_user_id, 'device_info': 'Mozilla/5.0'}) + ) + cache_manager_mock.add = Mock() + + user_container.db_client.override(db_repo_container.db_client) + user_container.cache_manager.override(cache_manager_mock) + common_container.cache_manager.override(cache_manager_mock) + + # Mock token service + mock_token_service = Mock() + mock_token_service.create_token.return_value = 'mock_token' + mock_token_service.decode_token.return_value = { + 'sub': 'test@example.com', + 'user_id': test_user_id, + 'role_id': 'test_role_id', + 'session_id': test_session_id, + 'code': 'mock_reset_code', + } + mock_token_service.token_expiry = 3600 + mock_token_service.temporary_token_expiry = 600 + + auth_container = AuthContainer( + db_client=db_repo_container.db_client, + cache_manager=cache_manager_mock, + ) + auth_container.token_service.override(mock_token_service) + + # mocking auth container superset_service + mock_superset_service = Mock() + mock_superset_service.generate_guest_token.return_value = 'mock_guest_token' + auth_container.superset_service.override(mock_superset_service) + + inference_container = InferenceContainer( + db_client=db_repo_container.db_client, + cache_manager=user_container.cache_manager, + ) + + # Explicitly mock CloudStorageManager with a string provider + mock_cloud_storage_manager_instance = Mock() + mock_cloud_storage_manager_instance.save_large_file = AsyncMock(return_value=None) + + inference_container.cloud_storage_manager.override( + providers.Singleton(lambda: mock_cloud_storage_manager_instance) + ) + + inference_container.wire(packages=['inference_module.controllers']) + common_container.wire( + packages=['auth_module.controllers', 'inference_module.controllers'] + ) + auth_container.wire( + packages=[ + 'user_management_module.authorization', + ] + ) + user_container.wire( + packages=[ + 'user_management_module.authorization', + ] + ) + + # Mock config_service + mock_config_service = Mock() + mock_config_service.config = { + 'cloud_config': {'cloud_provider': 'gcp'}, + 'gcp': {'model_storage_bucket': 'test_bucket'}, + 'aws': {'model_storage_bucket': 'test_bucket'}, + } + inference_container.config.override( + providers.Singleton(lambda: mock_config_service.config) + ) + + yield auth_container, common_container, inference_container + auth_container.unwire() + common_container.unwire() + inference_container.unwire() + + +@pytest.fixture +def 
test_client(setup_containers):
+    app = FastAPI()
+    app.add_middleware(RequireAuthMiddleware)
+    app.include_router(inference_router, prefix='/floware')
+    return TestClient(app)
+
+
+@pytest.fixture
+def auth_token(setup_containers, test_user_id, test_session_id):
+    auth_container, _, _ = setup_containers
+    token_service = auth_container.token_service()
+    token = token_service.create_token(
+        sub='test@example.com',
+        user_id=test_user_id,
+        role_id='test_role_id',
+        session_id=test_session_id,
+    )
+    return token
diff --git a/wavefront/server/modules/inference_module/tests/test_inference_controller.py b/wavefront/server/modules/inference_module/tests/test_inference_controller.py
new file mode 100644
index 00000000..bcf2d1e2
--- /dev/null
+++ b/wavefront/server/modules/inference_module/tests/test_inference_controller.py
@@ -0,0 +1,214 @@
+import pytest
+from fastapi import status
+from sqlalchemy.ext.asyncio import AsyncSession
+from db_repo_module.models.session import Session
+from db_repo_module.models.user import User
+from db_repo_module.models.model_schema import ModelSchema
+import io
+from unittest.mock import Mock
+import uuid
+
+
+async def create_session(test_session: AsyncSession, test_user_id, test_session_id):
+    user = User(
+        id=test_user_id,
+        email='test@example.com',
+        password='hashed_password',
+        first_name='Test',
+        last_name='User',
+    )
+
+    # Create a session in the database
+    db_session = Session(
+        id=test_session_id, user_id=test_user_id, device_info='test_device'
+    )
+
+    async with test_session() as session:
+        session.add(user)
+        session.add(db_session)
+        await session.commit()
+
+
+@pytest.mark.asyncio
+async def test_load_model_success(
+    test_client,
+    test_session: AsyncSession,
+    test_user_id,
+    test_session_id,
+    auth_token,
+    setup_containers,
+):
+    auth_container, common_container, inference_container = setup_containers
+    await create_session(test_session, test_user_id, test_session_id)
+    dummy_model_content = b'This is a dummy model file content.'
+    cache_manager_mock = Mock()
+    # For session data
+    cache_manager_mock.get_str.return_value = {}
+    cache_manager_mock.add = Mock()
+    common_container.cache_manager.override(cache_manager_mock)
+    dummy_file = io.BytesIO(dummy_model_content)
+    response = test_client.post(
+        '/floware/v1/model-repository/model',
+        data={'model_type': 'pytorch'},
+        files={
+            'model_file': ('test_model.pth', dummy_file, 'application/octet-stream')
+        },
+        headers={'Authorization': f'Bearer {auth_token}'},
+    )
+    assert response.status_code == 201
+
+
+@pytest.mark.asyncio
+async def test_load_model_tensorflow_success(
+    test_client,
+    test_session: AsyncSession,
+    test_user_id,
+    test_session_id,
+    auth_token,
+    setup_containers,
+):
+    auth_container, common_container, inference_container = setup_containers
+    await create_session(test_session, test_user_id, test_session_id)
+    dummy_model_content = b'This is a dummy tensorflow model file content.'
+    dummy_file = io.BytesIO(dummy_model_content)
+    cache_manager_mock = Mock()
+    # For session data
+    cache_manager_mock.get_str.return_value = {}
+    cache_manager_mock.add = Mock()
+    common_container.cache_manager.override(cache_manager_mock)
+    response = test_client.post(
+        '/floware/v1/model-repository/model',
+        data={'model_type': 'tensorflow'},
+        files={'model_file': ('test_model.tf', dummy_file, 'application/octet-stream')},
+        headers={'Authorization': f'Bearer {auth_token}'},
+    )
+    assert response.status_code == 201
+    assert (
+        response.json()['data']['message']
+        == 'Created the model inference table and inserted the model details successfully'
+    )
+
+
+@pytest.mark.asyncio
+async def test_load_model_no_file(
+    test_client,
+    test_session: AsyncSession,
+    test_user_id,
+    test_session_id,
+    auth_token,
+):
+    await create_session(test_session, test_user_id, test_session_id)
+    response = test_client.post(
+        '/floware/v1/model-repository/model',
+        data={'model_type': 'pytorch'},
+        headers={'Authorization': f'Bearer {auth_token}'},
+    )
+    assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
+    assert 'Field required' in response.json()['detail'][0]['msg']
+
+
+@pytest.mark.asyncio
+async def test_list_models_success(
+    test_client,
+    test_session: AsyncSession,
+    test_user_id,
+    test_session_id,
+    auth_token,
+    setup_containers,
+):
+    auth_container, common_container, inference_container = setup_containers
+
+    await create_session(test_session, test_user_id, test_session_id)
+    # Insert a dummy model record into the database
+    model_id = str(uuid.uuid4())
+    model_repo = inference_container.model_inference_repository()
+    async with model_repo.session() as session:
+        model_record = ModelSchema(
+            model_id=model_id,
+            model_name='test_model',
+            model_path=f'model_{model_id}/test_model.pth',
+            model_type='pytorch',
+        )
+        session.add(model_record)
+        await session.commit()
+    response = test_client.get(
+        '/floware/v1/model-repository/model',
+        headers={'Authorization': f'Bearer {auth_token}'},
+    )
+    assert len(response.json()['data']['data']) >= 1
+    assert response.status_code == status.HTTP_200_OK
+
+
+@pytest.mark.asyncio
+async def test_list_models_with_no_db_entries(
+    test_client,
+    test_session: AsyncSession,
+    test_user_id,
+    test_session_id,
+    auth_token,
+    setup_containers,
+):
+    await create_session(test_session, test_user_id, test_session_id)
+    # No model records are inserted for this test
+    response = test_client.get(
+        '/floware/v1/model-repository/model',
+        headers={'Authorization': f'Bearer {auth_token}'},
+    )
+    assert len(response.json()['data']['data']) == 0
+
+
+@pytest.mark.asyncio
+async def test_list_model_with_id_success(
+    test_client,
+    test_session: AsyncSession,
+    test_user_id,
+    test_session_id,
+    auth_token,
+    setup_containers,
+):
+    auth_container, common_container, inference_container = setup_containers
+
+    await create_session(test_session, test_user_id, test_session_id)
+    # Insert a dummy model record into the database
+    model_id = str(uuid.uuid4())
+    model_repo = inference_container.model_inference_repository()
+    async with model_repo.session() as session:
+        model_record = ModelSchema(
+            model_id=model_id,
+            model_name='test_model',
+            model_path=f'model_{model_id}/test_model.pth',
+            model_type='pytorch',
+        )
+        session.add(model_record)
+        await session.commit()
+    response = test_client.get(
+        f'/floware/v1/model-repository/model/{model_id}',
+        headers={'Authorization': f'Bearer {auth_token}'},
+    )
+    assert 
str(response.json()['data']['data']['model_id']) == str(model_id)
+    assert response.status_code == status.HTTP_200_OK
+
+
+@pytest.mark.asyncio
+async def test_list_model_with_id_failure(
+    test_client,
+    test_session: AsyncSession,
+    test_user_id,
+    test_session_id,
+    auth_token,
+    setup_containers,
+):
+    auth_container, common_container, inference_container = setup_containers
+
+    await create_session(test_session, test_user_id, test_session_id)
+    # Use a model ID that was never inserted into the database
+    model_id = str(uuid.uuid4())
+    response = test_client.get(
+        f'/floware/v1/model-repository/model/{model_id}',
+        headers={'Authorization': f'Bearer {auth_token}'},
+    )
+    assert (
+        response.json()['meta']['error']
+        == 'Model details not found in the Model inference table'
+    )
+    assert response.status_code == status.HTTP_400_BAD_REQUEST
diff --git a/wavefront/server/modules/insights_module/insights_module/controllers/dynamic_query_controller.py b/wavefront/server/modules/insights_module/insights_module/controllers/dynamic_query_controller.py
new file mode 100644
index 00000000..3227b6e7
--- /dev/null
+++ b/wavefront/server/modules/insights_module/insights_module/controllers/dynamic_query_controller.py
@@ -0,0 +1,99 @@
+from auth_module.auth_container import AuthContainer
+from common_module.common_container import CommonContainer
+from common_module.response_formatter import ResponseFormatter
+from db_repo_module.models.resource import ResourceScope
+from db_repo_module.models.role import Role
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from dependency_injector.wiring import inject
+from dependency_injector.wiring import Provide
+from fastapi import Depends
+from fastapi import Query
+from fastapi import Request
+from fastapi import status
+from fastapi.responses import JSONResponse
+from fastapi.routing import APIRouter
+from insights_module.insights_container import InsightsContainer
+from insights_module.service.dynamic_query_service import DynamicQueryService
+from insights_module.utils.helper import fetch_data_filters
+from user_management_module.user_container import UserContainer
+from user_management_module.services.user_service import UserService
+
+
+dynamic_query_router = APIRouter()
+
+
+@inject
+async def check_admin(
+    role_id: str,
+    role_repository: SQLAlchemyRepository[Role] = Depends(
+        Provide(AuthContainer.role_repository)
+    ),
+) -> bool:
+    role = await role_repository.find_one(id=role_id)
+    if not role:
+        return False
+    return role.name == 'admin'
+
+
+@dynamic_query_router.get('/dynamic-queries/{query_id}')
+@inject
+async def execute_dynamic_query(
+    request: Request,
+    query_id: str,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    user_service: UserService = Depends(Provide[UserContainer.user_service]),
+    dynamic_query_service: DynamicQueryService = Depends(
+        Provide[InsightsContainer.dynamic_query_service]
+    ),
+    filter: str | None = Query(None, alias='$filter'),
+    start_date: str | None = None,
+    end_date: str | None = None,
+    limit: str | None = None,
+    offset: str | None = None,
+    force: str | None = None,
+):
+    user_id = request.state.session.user_id
+    role_id = request.state.session.role_id
+
+    if not dynamic_query_service.is_valid_query(query_id):
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                'Invalid query ID or query params'
+            ),
+        )
+
+    rls_filter_str = None
+    is_admin = await 
check_admin(role_id)
+    if not is_admin:
+        rls_filters = await user_service.get_user_resources(
+            user_id=user_id, scope=ResourceScope.DATA
+        )
+
+        if len(rls_filters) == 0:
+            return JSONResponse(
+                status_code=status.HTTP_403_FORBIDDEN,
+                content=response_formatter.buildErrorResponse(
+                    'Data access not set for non-admin user'
+                ),
+            )
+
+        rls_filters = fetch_data_filters(rls_filters)
+        rls_filter_str = ' $and '.join(rls_filters)
+
+    all_query_params = dict(request.query_params)
+    query_results = await dynamic_query_service.execute_dynamic_query(
+        query_id=query_id,
+        params=all_query_params,
+        filter=filter,
+        rls_filter_str=rls_filter_str,
+        limit=limit,
+        offset=offset,
+        force=(force == 'true'),
+    )
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(query_results),
+    )
diff --git a/wavefront/server/modules/insights_module/insights_module/controllers/pdo_controller.py b/wavefront/server/modules/insights_module/insights_module/controllers/pdo_controller.py
new file mode 100644
index 00000000..3d24cfa4
--- /dev/null
+++ b/wavefront/server/modules/insights_module/insights_module/controllers/pdo_controller.py
@@ -0,0 +1,223 @@
+from auth_module.auth_container import AuthContainer
+from common_module.common_container import CommonContainer
+from common_module.response_formatter import ResponseFormatter
+from common_module.utils.serializer import serialize_values
+from db_repo_module.models.resource import Resource
+from db_repo_module.models.resource import ResourceScope
+from db_repo_module.models.role import Role
+from db_repo_module.models.user import User
+from db_repo_module.models.role_resource import RoleResource
+from db_repo_module.models.user_role import UserRole
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from dependency_injector.wiring import inject
+from dependency_injector.wiring import Provide
+from fastapi import Depends
+from fastapi import Query
+from fastapi import Request
+from fastapi import status
+from fastapi.responses import JSONResponse
+from fastapi.routing import APIRouter
+from insights_module.insights_container import InsightsContainer
+from insights_module.service.pdo_service import PdoService
+from insights_module.utils.helper import fetch_data_filters
+from user_management_module.user_container import UserContainer
+from user_management_module.services.user_service import UserService
+from sqlalchemy import Result
+from sqlalchemy import select
+from dataclasses import dataclass
+
+pdo_router = APIRouter()
+
+
+@dataclass
+class UpdateRequest:
+    data: dict
+
+
+@inject
+async def check_admin(
+    role_id: str,
+    role_repository: SQLAlchemyRepository[Role] = Depends(
+        Provide(AuthContainer.role_repository)
+    ),
+) -> bool:
+    role = await role_repository.find_one(id=role_id)
+    if not role:
+        return False
+    return role.name == 'admin'
+
+
+@pdo_router.get('/{resource_name}')
+@inject
+async def fetch_pvo_records(
+    request: Request,
+    resource_name: str,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    resource_repository: SQLAlchemyRepository[Resource] = Depends(
+        Provide[AuthContainer.resource_repository]
+    ),
+    user_service: UserService = Depends(Provide[UserContainer.user_service]),
+    cloud_service: PdoService = Depends(Provide[InsightsContainer.cloud_service]),
+    filter: str | None = Query(None, alias='$filter'),
+    limit: str | None = None,
+    offset: str | None = None,
+):
+    user_id = 
request.state.session.user_id + role_id = request.state.session.role_id + + if resource_name not in [ + 'parsed_data_object', + 'rf_parsed_data_object', + 'rf_gold_data_object', + 'rf_gold_item_details', + ]: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Invalid resource name: {resource_name}' + ), + ) + + if resource_name == 'parsed_data_object': + resource_name = 'rf_parsed_data_object' + + data_filters = [] + is_admin = await check_admin(role_id) + if not is_admin: + data_filters = await user_service.get_user_resources( + user_id=user_id, scope=ResourceScope.DATA + ) + + if len(data_filters) == 0: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Data access not set for non-admin user' + ), + ) + + data_filters = fetch_data_filters(data_filters) + if filter: + filter = f"{filter} $and ({' $and '.join(data_filters)})" + else: + filter = f"{ ' $and '.join(data_filters)}" + + pvo_records = cloud_service.fetch_upto_limit( + filter, limit, offset, table_name=resource_name + ) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'records': serialize_values(pvo_records)} + ), + ) + + +@pdo_router.patch('/{resource_name}/{id}') +@inject +async def patch_pvo_records( + request: Request, + resource_name: str, + id: str, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + resource_repository: SQLAlchemyRepository[Resource] = Depends( + Provide[AuthContainer.resource_repository] + ), + cloud_service: PdoService = Depends(Provide[InsightsContainer.cloud_service]), + payload: UpdateRequest = None, +): + user_id = request.state.session.user_id + role_id = request.state.session.role_id + + if resource_name not in [ + 'parsed_data_object', + 'rf_parsed_data_object', + 'rf_gold_data_object', + 'rf_gold_item_details', + ]: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Invalid resource name: {resource_name}' + ), + ) + + if resource_name == 'parsed_data_object': + resource_name = 'rf_parsed_data_object' + + data_filters = [] + is_admin = await check_admin(role_id) + if not is_admin: + async with resource_repository.session() as session: + statement = ( + select(Resource) + .join(RoleResource, Resource.id == RoleResource.resource_id) + .join(Role, Role.id == RoleResource.role_id) + .join(UserRole, UserRole.role_id == Role.id) + .join(User, UserRole.user_id == User.id) + .where(UserRole.user_id == user_id) + .where(User.deleted.is_(False)) + .where(Resource.scope == ResourceScope.DATA) + ) + result: Result = await session.execute(statement) + data_filters = result.scalars().all() + + if len(data_filters) == 0: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Data access not set for non-admin user' + ), + ) + + data_filters = fetch_data_filters(data_filters) + + cloud_service.patch_record_by_id( + id=id, + table_name=resource_name, + rls_filter=data_filters, + update_data=payload.data, + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'message': 'Successfully updated the records'} + ), + ) + + +@pdo_router.get('/parsed_data_object/audio') +@inject +async def fetch_audio( + resource_url: str, + response_formatter: ResponseFormatter = Depends( + 
Provide[CommonContainer.response_formatter] + ), + cloud_service: PdoService = Depends(Provide[InsightsContainer.cloud_service]), +): + audio_url = cloud_service.fetch_audio(resource_url) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'audio_url': audio_url}), + ) + + +@pdo_router.get('/parsed_data_object/transcript') +@inject +async def fetch_transcript( + resource_url: str, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + cloud_service: PdoService = Depends(Provide[InsightsContainer.cloud_service]), +): + transcripts = cloud_service.fetch_transcript(resource_url) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse(transcripts), + ) diff --git a/wavefront/server/modules/insights_module/insights_module/controllers/router.py b/wavefront/server/modules/insights_module/insights_module/controllers/router.py new file mode 100644 index 00000000..83608a11 --- /dev/null +++ b/wavefront/server/modules/insights_module/insights_module/controllers/router.py @@ -0,0 +1,7 @@ +from fastapi.routing import APIRouter +from insights_module.controllers.dynamic_query_controller import dynamic_query_router +from insights_module.controllers.pdo_controller import pdo_router + +insights_router = APIRouter() +insights_router.include_router(dynamic_query_router, prefix='/v1/insights') +insights_router.include_router(pdo_router, prefix='/v1/insights') diff --git a/wavefront/server/modules/insights_module/insights_module/db/bigquery_connector.py b/wavefront/server/modules/insights_module/insights_module/db/bigquery_connector.py new file mode 100644 index 00000000..23f94403 --- /dev/null +++ b/wavefront/server/modules/insights_module/insights_module/db/bigquery_connector.py @@ -0,0 +1,85 @@ +from dataclasses import dataclass +import datetime + +from common_module.log.logger import logger +from google.cloud import bigquery + + +@dataclass +class BigQueryConfig: + project_id: str + dataset_id: str + location: str = 'asia-south1' + + +class BigQueryConnector: + def __init__(self, bq_config: BigQueryConfig): + self.config = bq_config + self.client = self.__get_client() + + def __get_client(self): + try: + bq_client = bigquery.Client( + project=self.config.project_id, location='asia-south1' + ) + return bq_client + except Exception as e: + logger.error(f'Connection error: {str(e)}') + raise e + + def execute_query(self, query: str, parameters: dict = None): + try: + logger.debug(f'Executing query: {query}') + logger.debug(f'Parameters: {parameters}') + + job_config = bigquery.QueryJobConfig() + if parameters: + query_params = [] + for key, value in parameters.items(): + if isinstance(value, str): + query_params.append( + bigquery.ScalarQueryParameter(key, 'STRING', value) + ) + elif isinstance(value, int): + query_params.append( + bigquery.ScalarQueryParameter(key, 'INT64', value) + ) + elif isinstance(value, float): + query_params.append( + bigquery.ScalarQueryParameter(key, 'FLOAT64', value) + ) + elif isinstance(value, bool): + query_params.append( + bigquery.ScalarQueryParameter(key, 'BOOL', value) + ) + elif isinstance(value, datetime.datetime): + query_params.append( + bigquery.ScalarQueryParameter(key, 'TIMESTAMP', value) + ) + else: + query_params.append( + bigquery.ScalarQueryParameter(key, 'STRING', str(value)) + ) + + job_config.query_parameters = query_params + + query_job = self.client.query(query, job_config=job_config) + result = 
query_job.result() + + # Convert RowIterator to list immediately to avoid iterator exhaustion + rows = list(result) + column_names = [field.name for field in result.schema] + + if query.strip().upper().startswith('INSERT'): + logger.info( + f'Insert completed. Affected rows: {query_job.num_dml_affected_rows}' + ) + return rows, column_names + + except Exception as e: + logger.error( + f'Query execution failed: {str(e)}\n' + f'Query: {query}\n' + f'Parameters: {parameters}' + ) + raise e diff --git a/wavefront/server/modules/insights_module/insights_module/db/redshift_connector.py b/wavefront/server/modules/insights_module/insights_module/db/redshift_connector.py new file mode 100644 index 00000000..9d14cd1f --- /dev/null +++ b/wavefront/server/modules/insights_module/insights_module/db/redshift_connector.py @@ -0,0 +1,151 @@ +from contextlib import contextmanager +from dataclasses import dataclass +from functools import wraps +import time + +from common_module.log.logger import logger +import redshift_connector +from redshift_connector.core import Connection + + +@dataclass +class RedshiftConfig: + username: str + password: str + host: str + port: str + db_name: str + read_only: bool = False + + +def retry_on_connection_error(max_retries=3, delay=1, timeout=30): + def decorator(func): + @wraps(func) + def wrapper(self: 'RedshiftConnector', *args, **kwargs): + retries = 0 + last_exception = None + + while retries < max_retries: + try: + kwargs.pop('connection', None) + with self.get_connection(timeout) as conn: + return func(self, *args, **kwargs, connection=conn) + except ( + redshift_connector.Error, + redshift_connector.OperationalError, + ) as e: + last_exception = e + retries += 1 + logger.warning( + f'Database connection error: {str(e)}. ' + f'Attempt {retries} of {max_retries}' + ) + + if retries == max_retries: + logger.error( + f'Max retries reached. 
Last error: {str(last_exception)}'
+                        )
+                        raise last_exception
+
+                    time.sleep(delay * retries)  # Linear backoff: wait grows with each attempt
+            return None
+
+        return wrapper
+
+    return decorator
+
+
+class RedshiftConnector:
+    def __init__(self, redshift_config: RedshiftConfig):
+        self.config = redshift_config
+
+    @contextmanager
+    def get_connection(self, timeout=300):
+        connection = None
+        try:
+            connection: Connection = redshift_connector.connect(
+                host=self.config.host,
+                port=int(self.config.port),
+                database=self.config.db_name,
+                user=self.config.username,
+                password=self.config.password,
+                timeout=timeout,
+                ssl=True,
+                tcp_keepalive=True,
+            )
+            redshift_connector.paramstyle = 'named'
+
+            if self.config.read_only:
+                logger.debug('Making read-only connection to Redshift')
+                cursor = connection.cursor()
+                cursor.execute('SET SESSION CHARACTERISTICS AS TRANSACTION READ ONLY')
+                cursor.close()
+
+            yield connection
+        except Exception as e:
+            logger.error(f'Connection error: {str(e)}')
+            raise e
+        finally:
+            self.__close_connection(connection=connection)
+
+    def __close_connection(self, connection: Connection):
+        try:
+            if connection:
+                connection.close()
+        except Exception as e:
+            logger.error(f'Connection closing error: {str(e)}')
+            raise e
+
+    @retry_on_connection_error()
+    def execute_query(
+        self, query: str, parameters: dict | None = None, connection: Connection = None
+    ):
+        try:
+            if self.config.read_only:
+                query_upper = query.strip().upper()
+                write_operations = (
+                    'INSERT',
+                    'UPDATE',
+                    'DELETE',
+                    'CREATE',
+                    'DROP',
+                    'ALTER',
+                    'TRUNCATE',
+                )
+                if any(query_upper.startswith(op) for op in write_operations):
+                    raise ValueError(
+                        'Write operations are not allowed in read-only mode'
+                    )
+
+            logger.debug(f'Executing query: {query}')
+            logger.debug(f'Parameters: {parameters}')
+
+            cursor = connection.cursor()
+
+            redshift_connector.paramstyle = 'named'
+            if parameters:
+                cursor.execute(query, parameters)
+            else:
+                cursor.execute(query)
+
+            if query.strip().upper().startswith('INSERT'):
+                logger.info(f'Insert completed. 
Rowcount: {cursor.rowcount}')
+                return cursor.rowcount
+
+            try:
+                results = cursor.fetchall()
+                column_names = [desc[0] for desc in cursor.description]
+                return results, column_names
+            except redshift_connector.ProgrammingError:
+                return cursor.rowcount
+            finally:
+                cursor.close()
+
+        except Exception as e:
+            logger.error(
+                f'Query execution failed: {str(e)}\n'
+                f'Query: {query}\n'
+                f'Parameters: {parameters}'
+            )
+
+            raise e
diff --git a/wavefront/server/modules/insights_module/insights_module/insights_container.py b/wavefront/server/modules/insights_module/insights_module/insights_container.py
new file mode 100644
index 00000000..211ded47
--- /dev/null
+++ b/wavefront/server/modules/insights_module/insights_module/insights_container.py
@@ -0,0 +1,84 @@
+import os
+
+from dependency_injector import containers
+from dependency_injector import providers
+from insights_module.db.bigquery_connector import BigQueryConfig
+from insights_module.db.bigquery_connector import BigQueryConnector
+from insights_module.db.redshift_connector import RedshiftConfig
+from insights_module.db.redshift_connector import RedshiftConnector
+from insights_module.repository.pvo_repository import PVORepository
+from insights_module.service.dynamic_query_service import DynamicQueryService
+from insights_module.service.insights_service import InsightsService
+from insights_module.service.pdo_service import AWSServices
+from insights_module.service.pdo_service import GCPServices
+from insights_module.service.usage_metric_service import UsageMetricService
+from flo_cloud.cloud_storage import CloudStorageManager
+
+
+class InsightsContainer(containers.DeclarativeContainer):
+    config = providers.Configuration(ini_files=['./config.ini'])
+
+    notification_repository = providers.Dependency()
+
+    cache_manager = providers.Dependency()
+
+    cloud_provider = os.environ.get('CLOUD_PROVIDER', 'aws')
+
+    if cloud_provider == 'aws':
+        redshift_config = providers.Factory(
+            RedshiftConfig,
+            username=config.redshift.username,
+            password=config.redshift.password,
+            host=config.redshift.host,
+            port=config.redshift.port,
+            db_name=config.redshift.db_name,
+        )
+        connector = providers.Singleton(RedshiftConnector, redshift_config)
+    elif cloud_provider == 'gcp':
+        bq_config = providers.Factory(
+            BigQueryConfig,
+            project_id=config.bigquery.project_id,
+            dataset_id=config.bigquery.dataset_id,
+        )
+        connector = providers.Singleton(BigQueryConnector, bq_config)
+
+    pvo_repository = providers.Singleton(
+        PVORepository,
+        connector,
+        dataset_id=config.bigquery.dataset_id,
+    )
+
+    insights_service = providers.Singleton(
+        InsightsService,
+        repository=pvo_repository,
+        today_as_max_from_db=config.insights.today_as_max_from_db,
+    )
+    usage_metric_service = providers.Singleton(
+        UsageMetricService,
+        repository=pvo_repository,
+        cloud_provider=cloud_provider,
+    )
+
+    if cloud_provider == 'aws':
+        cloud_service = providers.Singleton(
+            AWSServices,
+            insights_service=insights_service,
+            transcript_bucket_name=config.aws.transcript_bucket_name,
+            audio_bucket_name=config.aws.audio_bucket_name,
+        )
+    elif cloud_provider == 'gcp':
+        cloud_service = providers.Singleton(
+            GCPServices,
+            insights_service=insights_service,
+            transcript_bucket_name=config.gcp.transcript_bucket_name,
+            audio_bucket_name=config.gcp.audio_bucket_name,
+        )
+
+    cloud_manager = providers.Singleton(
+        CloudStorageManager, provider=config.cloud_config.cloud_provider
+    )
+    dynamic_query_service = providers.Singleton(
+        DynamicQueryService,
+        pvo_repository=pvo_repository,
+        cache_manager=cache_manager,
+    )
diff --git a/wavefront/server/modules/insights_module/insights_module/models/dymanic_query.py b/wavefront/server/modules/insights_module/insights_module/models/dymanic_query.py
new file mode 100644
index 00000000..c56156d0
--- /dev/null
+++ b/wavefront/server/modules/insights_module/insights_module/models/dymanic_query.py
@@ -0,0 +1,30 @@
+from dataclasses import dataclass
+from dataclasses import field
+from typing import List
+
+
+@dataclass
+class QueryParameter:
+    name: str
+    type: str
+
+
+@dataclass
+class QueryParameterValue:
+    name: str
+    value: str
+
+
+@dataclass
+class Query:
+    id: str
+    description: str
+    query: str
+    parameters: List[QueryParameter] = field(default_factory=list)
+
+
+@dataclass
+class DynamicQuery:
+    id: str
+    name: str
+    queries: List[Query]
diff --git a/wavefront/server/modules/insights_module/insights_module/models/insights_signal.py b/wavefront/server/modules/insights_module/insights_module/models/insights_signal.py
new file mode 100644
index 00000000..9997442e
--- /dev/null
+++ b/wavefront/server/modules/insights_module/insights_module/models/insights_signal.py
@@ -0,0 +1,122 @@
+from dataclasses import asdict
+from dataclasses import dataclass
+from datetime import date
+from enum import Enum
+import math
+from typing import Dict, List
+
+from insights_module.models.insights_signal_query import Threshold
+
+
+class AlertType(str, Enum):
+    L7D = 'L7D'
+    L30D = 'L30D'
+    goal_line = 'goal_line'
+
+    @staticmethod
+    def resolve(type: str):
+        if type == 'L7D':
+            return AlertType.L7D
+        if type == 'L30D':
+            return AlertType.L30D
+        # L90D has no enum member of its own and falls back to the L30D bucket
+        if type == 'L90D':
+            return AlertType.L30D
+        if type == 'goal_line':
+            return AlertType.goal_line
+        else:
+            raise ValueError(f'Unknown alert type: {type}')
+
+
+def serialize_values(value):
+    if isinstance(value, date):
+        return value.isoformat()  # Convert date to string ('YYYY-MM-DD')
+    elif isinstance(value, Enum):
+        return value.value  # Convert Enum to its string representation
+    elif isinstance(value, float) and (math.isnan(value) or math.isinf(value)):
+        return None  # Handle NaN, inf, -inf safely
+    elif isinstance(value, list):
+        return [serialize_values(v) for v in value]  # Recursively handle lists
+    elif isinstance(value, dict):
+        return {
+            k: serialize_values(v) for k, v in value.items()
+        }  # Recursively handle dicts
+    return value  # Return other types as-is
+
+
+@dataclass
+class Alert:
+    metric: str
+    threshold: float
+    previous_value: float
+    current_value: float
+    diff_value: float
+    type: AlertType
+
+    def to_dict(self):
+        data = asdict(self)
+        data['type'] = self.type.value
+        return data
+
+
+@dataclass
+class Metric:
+    metric: str
+    name: str
+    value: float
+
+
+@dataclass
+class ActionableAlerts:
+    id: str
+    title: str
+    type: str
+    name: str
+    description: str
+    alerts: List[Alert]
+
+    def has_alerts(self):
+        return len(self.alerts) > 0
+
+
+@dataclass
+class DataPoints:
+    window_type: str
+    old_window: Dict[str, List]
+    new_window: Dict[str, List]
+
+
+@dataclass
+class DetailedInsights:
+    metrices: List[Metric]
+    data_points: DataPoints
+    goal_lines: List[Threshold]
+
+    def to_dict(self):
+        return {
+            'metrices': [asdict(m) for m in self.metrices],
+            'data_points': serialize_values(asdict(self.data_points)),
+            'goal_lines': [asdict(g) for g in self.goal_lines],
+        }
+
+
+@dataclass
+class ActionableInsights:
+    id: str
+    title: str
+    type: str
+    name: str
+    description: str
+    alerts: List[Alert]
+    details: DetailedInsights
+
+    @staticmethod
+    def to_actionable(alerts: 
ActionableAlerts, insights: DetailedInsights):
+        return ActionableInsights(
+            id=alerts.id,
+            name=alerts.name,
+            title=alerts.title,
+            type=alerts.type,
+            description=alerts.description,
+            alerts=alerts.alerts,
+            details=insights,
+        )
diff --git a/wavefront/server/modules/insights_module/insights_module/models/insights_signal_query.py b/wavefront/server/modules/insights_module/insights_module/models/insights_signal_query.py
new file mode 100644
index 00000000..a5e078ae
--- /dev/null
+++ b/wavefront/server/modules/insights_module/insights_module/models/insights_signal_query.py
@@ -0,0 +1,96 @@
+from dataclasses import dataclass
+from dataclasses import field
+from typing import List
+
+import dacite
+import yaml
+
+
+@dataclass
+class Threshold:
+    metric: str
+    threshold: float
+
+
+@dataclass
+class Periodicity:
+    period: str
+    alerts: List[Threshold]
+
+
+@dataclass
+class Projection:
+    sql: str
+    metric: str
+    name: str
+
+
+@dataclass
+class Projections:
+    parent: List[Projection]
+    children: List[Projection] = field(default_factory=list)
+
+
+@dataclass
+class Variable:
+    name: str
+
+
+@dataclass
+class Query:
+    sql: str
+    variables: List[Variable] = field(default_factory=list)
+
+
+@dataclass
+class Plot:
+    name: str
+    metrices: List[Projection]
+
+
+@dataclass
+class SignalQuery:
+    id: str
+    name: str
+    title: str
+    description: str
+    projections: Projections
+    query: Query
+    version: int
+    type: str
+    periodicity: List[Periodicity]
+    plots: List[Plot]
+    goal_lines: List[Threshold] = field(default_factory=list)
+
+
+def load_yaml_to_signal(signals: list) -> List[SignalQuery]:
+    yaml_data = []
+    for signal in signals:
+        yaml_data.append(
+            dacite.from_dict(
+                data_class=SignalQuery,
+                data={
+                    'id': signal.id,
+                    'name': signal.name,
+                    'title': signal.title,
+                    'description': signal.description,
+                    'projections': signal.projections,
+                    'query': signal.query,
+                    'version': signal.version,
+                    'type': signal.type,
+                    'periodicity': signal.periodicity,
+                    'plots': signal.plots,
+                    'goal_lines': signal.goal_lines,
+                },
+            )
+        )
+    return yaml_data
+
+
+def load_yaml_from_str(yaml_str: str) -> SignalQuery:
+    yml_dict = yaml.safe_load(yaml_str)
+    return dacite.from_dict(data_class=SignalQuery, data=yml_dict)
+
+
+def load_from_dict(data: dict) -> SignalQuery:
+    return dacite.from_dict(data_class=SignalQuery, data=data)
diff --git a/wavefront/server/modules/insights_module/insights_module/models/lead_signal_query.py b/wavefront/server/modules/insights_module/insights_module/models/lead_signal_query.py
new file mode 100644
index 00000000..a3adadff
--- /dev/null
+++ b/wavefront/server/modules/insights_module/insights_module/models/lead_signal_query.py
@@ -0,0 +1,17 @@
+from dataclasses import dataclass
+from datetime import datetime
+from typing import Dict, List
+
+
+@dataclass
+class LeadQuery:
+    product_category: str
+    lead_type: str
+    query: str
+    periodicity: List[Dict[str, str]]
+
+
+@dataclass
+class QueryWindow:
+    start: datetime
+    end: datetime
diff --git a/wavefront/server/modules/insights_module/insights_module/models/leads_aggreegate.py b/wavefront/server/modules/insights_module/insights_module/models/leads_aggreegate.py
new file mode 100644
index 00000000..3fb1570d
--- /dev/null
+++ b/wavefront/server/modules/insights_module/insights_module/models/leads_aggreegate.py
@@ -0,0 +1,12 @@
+class LeadAggregate:
+    def __init__(self, product_name, type, count):
+        self.product_name = product_name
+        self.type = type
+        self.count = count
+
+    def to_dict(self):
+        return {
+            'product_name': self.product_name,
+            'type': self.type,
+            'count': self.count,
+        }
diff --git a/wavefront/server/modules/insights_module/insights_module/repository/pvo_repository.py b/wavefront/server/modules/insights_module/insights_module/repository/pvo_repository.py
new file mode 100644
index 00000000..1243f23f
--- /dev/null
+++ b/wavefront/server/modules/insights_module/insights_module/repository/pvo_repository.py
@@ -0,0 +1,231 @@
+import os
+from typing import Dict, List
+
+from common_module.log.logger import logger
+import dacite
+from insights_module.db.redshift_connector import RedshiftConnector
+from insights_module.models.dymanic_query import DynamicQuery
+from insights_module.models.lead_signal_query import LeadQuery
+from insights_module.models.insights_signal import serialize_values
+import yaml
+
+# Define default project paths
+DEFAULT_PROJECT_PATH = 'apps/floware/floware'
+
+
+class PVORepository:
+    def __init__(
+        self,
+        redshift_connector: RedshiftConnector,
+        dataset_id: str = None,
+    ):
+        self.dataset_id = dataset_id
+        self.connector = redshift_connector
+        self.project_path = os.getenv('PROJECT_PATH', DEFAULT_PROJECT_PATH)
+
+    def __load_yaml_files(self, directory: str, data_class: type) -> List:
+        """Load and parse YAML files from directory into list of data_class objects"""
+        results = []
+        for filename in os.listdir(directory):
+            if filename.endswith(('.yaml', '.yml')):
+                file_path = os.path.join(directory, filename)
+                with open(file_path, 'r') as f:
+                    yaml_data = yaml.safe_load(f)
+                    if isinstance(yaml_data, list):
+                        for item in yaml_data:
+                            obj = dacite.from_dict(data_class=data_class, data=item)
+                            results.append(obj)
+                    else:
+                        obj = dacite.from_dict(data_class=data_class, data=yaml_data)
+                        results.append(obj)
+        return results
+
+    def __get_asset_directory(self, asset_type: str, env_var: str) -> str:
+        """Get normalized directory path for asset files"""
+        root_dir = os.path.dirname(
+            os.path.dirname(
+                os.path.dirname(
+                    os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+                )
+            )
+        )
+        # Honour the PROJECT_PATH override captured in the constructor
+        base_dir = os.path.join(root_dir, self.project_path)
+        directory_path = os.getenv(env_var, '')  # Provide empty string as default
+        directory = os.path.join(base_dir, f'assets/{asset_type}', directory_path)
+        return os.path.normpath(directory)
+
+    def fetch_signal_from_yaml(self) -> List[LeadQuery]:
+        directory = self.__get_asset_directory('leads', 'LEADS_DIR')
+        return self.__load_yaml_files(directory, LeadQuery)
+
+    def fetch_dynamic_queries(self) -> List[DynamicQuery]:
+        directory = self.__get_asset_directory('dynamic_queries', 'DYNAMIC_QUERIES_DIR')
+        return self.__load_yaml_files(directory, DynamicQuery)
+
+    def fetch_pvo_record(
+        self,
+        odata_condition: str,
+        params: Dict,
+        limit: str | None = None,
+        offset: str | None = None,
+        table_name: str = 'rf_parsed_data_object',
+    ) -> Dict[str, List]:
+        table_name = self.__resolve_table_name(self.dataset_id, table_name=table_name)
+        template_to_run = f'SELECT * FROM {table_name}'
+        if odata_condition:
+            template_to_run += f' WHERE {odata_condition}'
+
+        template_to_run += ' ORDER BY start_time DESC'
+
+        if limit:
+            template_to_run += f' LIMIT {limit}'
+        else:
+            template_to_run += ' LIMIT 1'
+        if offset:
+            template_to_run += f' OFFSET {offset}'
+
+        results, column_names = self.connector.execute_query(
+            template_to_run, parameters=params
+        )
+        return self.__format_results_json(results, column_names)
+
+    def update_pvo_record(
+        self,
+        id: str,
+        update_data: Dict[str, str],
+        table_name: str,
+        odata_condition: str | None = None,
+        rls_params: 
Dict[str, str] | None = None,
+    ) -> None:
+        odata_condition = odata_condition or 'TRUE'
+        cloud_provider = os.environ.get('CLOUD_PROVIDER', 'aws')
+        param_symbol = '@' if cloud_provider == 'gcp' else ':'
+        table_name = self.__resolve_table_name(self.dataset_id, table_name=table_name)
+        set_clause = ', '.join([f'{key} = {param_symbol}{key}' for key in update_data.keys()])
+        query_to_run = f'UPDATE {table_name} SET {set_clause} WHERE id = {param_symbol}id AND {odata_condition}'
+        params = {**update_data, 'id': id, **(rls_params if rls_params else {})}
+        logger.info(f'Running query: {query_to_run} with params: {params}')
+        self.connector.execute_query(query_to_run, parameters=params)
+
+    def fetch_insights(
+        self, query: str, projection: str, start_date: str, end_date: str
+    ) -> Dict[str, List]:
+        template_to_run = f'SELECT {projection} FROM ({query}) LIMIT 100'
+        query_to_run = template_to_run.replace('{{start_date}}', start_date).replace(
+            '{{end_date}}', end_date
+        )
+        logger.debug(f'Running query: {query_to_run}')
+        results, column_names = self.connector.execute_query(query_to_run)
+
+        return self.__format_results(results, column_names)
+
+    def get_max_record_date(self) -> str | None:
+        table_name = self.__resolve_table_name(self.dataset_id)
+        query_to_run = f'SELECT MAX(start_time) as max_start_time FROM {table_name}'
+        logger.debug(f'Running query: {query_to_run}')
+        results, column_names = self.connector.execute_query(query_to_run)
+        formatted_outputs = self.__format_results(results, column_names)
+        max_date = None
+        if (
+            'max_start_time' in formatted_outputs
+            and len(formatted_outputs['max_start_time']) > 0
+        ):
+            max_date = formatted_outputs['max_start_time'][0]
+        return max_date
+
+    def fetch_raw_values(
+        self, query: str, projection: str, start_date: str, end_date: str
+    ) -> Dict[str, List]:
+        template_to_run = (
+            f'SELECT start_date, {projection} FROM ({query}) GROUP BY start_date'
+        )
+        query_to_run = template_to_run.replace('{{start_date}}', start_date).replace(
+            '{{end_date}}', end_date
+        )
+        logger.debug(f'Running query: {query_to_run}')
+        results, column_names = self.connector.execute_query(query_to_run)
+
+        return self.__format_results(results, column_names)
+
+    def execute_query(
+        self, query: str, start_date: str, end_date: str
+    ) -> Dict[str, List]:
+        query_to_run = query.replace('{{start_date}}', start_date).replace(
+            '{{end_date}}', end_date
+        )
+        logger.debug(f'Running query: {query_to_run}')
+        results, column_names = self.connector.execute_query(query_to_run)
+
+        return self.__format_results(results, column_names)
+
+    def execute_dynamic_query(
+        self,
+        query: str,
+        odata_filters: str,
+        odata_data_filter: str,
+        params: dict | None = None,
+        limit: str | None = None,
+        offset: str | None = None,
+    ) -> List[Dict]:
+        logger.debug(f'Running query: {query}')
+
+        query = query.replace(
+            '{{rls}}', f'{odata_data_filter}' if odata_data_filter else 'TRUE'
+        )
+        query = query.replace(
+            '{{filters}}', f'{odata_filters}' if odata_filters else 'TRUE'
+        )
+        if limit:
+            query += f' LIMIT {limit}'
+        if offset:
+            query += f' OFFSET {offset}'
+
+        results, column_names = self.connector.execute_query(query, parameters=params)
+
+        return self.__format_results_json(results, column_names)
+
+    def __format_results(self, results, column_names):
+        if not results:
+            return {col: [] for col in column_names}
+
+        return {col: [row[i] for row in results] for i, col in enumerate(column_names)}
+
+    def __format_results_json(self, results, column_names):
+        if not results:
+            return []
+
+        json_data 
= []
+        for res in results:
+            result = {}
+            for i, col in enumerate(column_names):
+                result[col] = res[i]
+            json_data.append(result)
+        serialized_json = serialize_values(json_data)
+        return serialized_json
+
+    def __resolve_table_name(
+        self, dataset_id: str = '', table_name='rf_parsed_data_object'
+    ):
+        full_table_name = table_name
+        if dataset_id:
+            full_table_name = f'{dataset_id}.{table_name}'
+
+        return full_table_name
+
+    def fetch_usage_metrics(self, start_time: str, end_time: str, cloud_provider: str):
+        table_name = self.__resolve_table_name(self.dataset_id)
+        dynamic_var_char = '@' if cloud_provider == 'gcp' else ':'
+        query = f"""
+            SELECT
+                COUNT(DISTINCT CASE WHEN rf_transcription_status = 'success' THEN conversation_id END) AS transcription_success,
+                COUNT(DISTINCT CASE WHEN rf_transcription_status = 'empty' THEN conversation_id END) AS transcription_empty,
+                COUNT(DISTINCT CASE WHEN rf_insights_status = 'success' THEN conversation_id END) AS insights_success,
+                COUNT(DISTINCT CASE WHEN rf_transcription_status = 'failure' OR rf_transcription_status IS NULL THEN conversation_id END) AS transcription_failure,
+                COUNT(DISTINCT CASE WHEN rf_insights_status = 'failure' OR rf_insights_status IS NULL THEN conversation_id END) AS insights_failure,
+                SUM(CASE WHEN rf_insights_status = 'success' THEN total_duration END) as total_call_duration
+            FROM {table_name}
+            WHERE created_at BETWEEN {dynamic_var_char}start_time AND {dynamic_var_char}end_time;
+        """
+        params = {'start_time': start_time, 'end_time': end_time}
+        results, column_names = self.connector.execute_query(query, params)
+        return self.__format_results(results, column_names)
diff --git a/wavefront/server/modules/insights_module/insights_module/service/dynamic_query_service.py b/wavefront/server/modules/insights_module/insights_module/service/dynamic_query_service.py
new file mode 100644
index 00000000..9f7b227f
--- /dev/null
+++ b/wavefront/server/modules/insights_module/insights_module/service/dynamic_query_service.py
@@ -0,0 +1,174 @@
+import asyncio
+import json
+import hashlib
+
+from common_module.log.logger import logger
+from common_module.utils.odata_parser import prepare_odata_filter
+from db_repo_module.cache.cache_manager import CacheManager
+from insights_module.models.dymanic_query import DynamicQuery
+from insights_module.models.dymanic_query import Query
+from insights_module.repository.pvo_repository import PVORepository
+
+
+class DynamicQueryService:
+    def __init__(
+        self,
+        pvo_repository: PVORepository,
+        cache_manager: CacheManager,
+    ):
+        self.pvo_repository = pvo_repository
+        self.cache_manager = cache_manager
+        self.dynamic_query_map: dict[str, DynamicQuery] = dict()
+        self.__load_dynamic_queries()
+
+    def __load_dynamic_queries(self):
+        all_dynamic_queries = self.pvo_repository.fetch_dynamic_queries()
+        for dynamic_query in all_dynamic_queries:
+            self.dynamic_query_map[dynamic_query.id] = dynamic_query
+
+    def is_valid_query(self, query_id: str) -> bool:
+        return query_id in self.dynamic_query_map
+
+    async def execute_dynamic_query(
+        self,
+        query_id: str,
+        filter: str | None = None,
+        rls_filter_str: str | None = None,
+        params: dict[str, str] | None = None,
+        limit: str | None = None,
+        offset: str | None = None,
+        force: bool = False,
+    ):
+        query = self.dynamic_query_map[query_id]
+        result_by_query = dict()
+
+        logger.info(f'Executing dynamic query: {query_id}')
+
+        # Create tasks for parallel execution
+        tasks = []
+        for sub_query in query.queries:
+            task = asyncio.create_task(
+                self.__execute_single_query(
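+                    # NOTE: one task per sub-query; __execute_single_query pushes the
+                    # blocking repository call to a worker thread (asyncio.to_thread),
+                    # so the sub-queries run concurrently rather than back to back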
+                    sub_query,
+                    filter,
+                    rls_filter_str,
+                    params,
+                    limit=limit,
+                    offset=offset,
+                    force=force,
+                )
+            )
+            tasks.append((sub_query.id, task))
+
+        # Wait for all tasks to complete
+        for sub_query_id, task in tasks:
+            result_by_query[sub_query_id] = await task
+
+        return result_by_query
+
+    def __generate_cache_key(
+        self,
+        query: Query,
+        filter: str,
+        rls_filter_str: str,
+        params: dict,
+        limit: str | None = None,
+        offset: str | None = None,
+    ) -> str:
+        """Generate a unique cache key based on query parameters."""
+        key_dict = {
+            'query_id': query.id,
+            'filter': filter,
+            'rls_filter': rls_filter_str,
+            'params': sorted(params.items()),
+            'limit': limit,
+            'offset': offset,
+        }
+        key_json = json.dumps(key_dict, sort_keys=True, separators=(',', ':'))
+        hash_digest = hashlib.md5(key_json.encode()).hexdigest()
+        return f'dynamic_query:{hash_digest}'
+
+    async def __execute_single_query(
+        self,
+        query: Query,
+        filter: str | None = None,
+        rls_filter_str: str | None = None,
+        params: dict[str, str] | None = None,
+        limit: str | None = None,
+        offset: str | None = None,
+        force: bool = False,
+    ) -> dict:
+        try:
+            params_to_execute = dict()
+            odata_filter, odata_params = prepare_odata_filter(filter)
+            odata_data_filter, odata_data_params = prepare_odata_filter(
+                rls_filter_str, prefix='rls_'
+            )
+            incoming_param_value: dict[str, str] = params
+            for qp in query.parameters:
+                if qp.name not in incoming_param_value:
+                    raise ValueError(
+                        f'Missing parameter: {qp.name} for query {query.id}'
+                    )
+                params_to_execute[qp.name] = incoming_param_value[qp.name]
+
+            # Generate cache key
+            cache_key = self.__generate_cache_key(
+                query, filter, rls_filter_str, params_to_execute, limit, offset
+            )
+
+            # Try to get from cache first
+            cached_result = self.cache_manager.get_str(cache_key)
+            if cached_result and not force:
+                logger.info(f'Cache hit for query {query.id}')
+                return {
+                    'status': 'success',
+                    'error': None,
+                    'result': json.loads(cached_result),
+                }
+
+            logger.info(
+                f'Executing query {query.id} with parameters: {params_to_execute}'
+            )
+
+            # TODO: If rls and filter have same columns mentioned, the behavior can be unpredictable.
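+            # e.g. a user filter and an RLS filter that both constrain the same
+            # column are ANDed into the SQL without reconciliation, so the combined
+            # predicate may be narrower (or unsatisfiable) than either one intended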
+            if odata_params:
+                params_to_execute.update(odata_params)
+            if odata_data_params:
+                params_to_execute.update(odata_data_params)
+
+            # Run the query in a thread pool since it's a blocking operation
+            result = await asyncio.to_thread(
+                self.pvo_repository.execute_dynamic_query,
+                query.query,
+                odata_filter,
+                odata_data_filter,
+                params_to_execute,
+                limit=limit,
+                offset=offset,
+            )
+
+            # Cache the result for two minutes (120 seconds)
+            self.cache_manager.add(cache_key, json.dumps(result), expiry=60 * 2)
+
+            return {
+                'status': 'success',
+                'error': None,
+                'description': query.description,
+                'result': result,
+            }
+        except Exception as e:
+            logger.exception(e)
+            logger.error(f'Error executing query {query.id}: {str(e)}')
+            return {
+                'status': 'error',
+                'description': None,
+                'error': 'Unexpected error while executing query',
+                'result': [],
+            }
diff --git a/wavefront/server/modules/insights_module/insights_module/service/insights_service.py b/wavefront/server/modules/insights_module/insights_module/service/insights_service.py
new file mode 100644
index 00000000..322a4699
--- /dev/null
+++ b/wavefront/server/modules/insights_module/insights_module/service/insights_service.py
@@ -0,0 +1,353 @@
+from dataclasses import dataclass
+from datetime import datetime
+from datetime import timedelta
+from typing import Dict, List, Any
+
+from common_module.log.logger import logger
+from insights_module.models.insights_signal import ActionableAlerts
+from insights_module.models.insights_signal import Alert
+from insights_module.models.insights_signal import AlertType
+from insights_module.models.insights_signal import DataPoints
+from insights_module.models.insights_signal import DetailedInsights
+from insights_module.models.insights_signal import Metric
+from insights_module.models.insights_signal_query import Periodicity
+from insights_module.models.insights_signal_query import Projection
+from insights_module.models.insights_signal_query import Projections
+from insights_module.models.insights_signal_query import SignalQuery
+from insights_module.models.insights_signal_query import Threshold
+from insights_module.repository.pvo_repository import PVORepository
+
+
+@dataclass
+class QueryWindow:
+    name: str
+    start: str
+    end: str
+    alerts: List[Threshold]
+
+
+class InsightsService:
+    def __init__(
+        self,
+        repository: PVORepository,
+        today_as_max_from_db: str = 'false',
+    ):
+        self.repository = repository
+        self.today_as_max_from_db = today_as_max_from_db == 'true'
+        self.current_date = datetime.today() - timedelta(days=1)
+
+        if self.today_as_max_from_db:
+            max_date = self.repository.get_max_record_date()
+            logger.info(f'Configuring current date as: {max_date}')
+            self.current_date = max_date if max_date is not None else self.current_date
+
+    def __get_windows(self, name: str, number_of_days: int, alerts: List[Threshold]):
+        today = self.current_date
+        logger.info(f'Max date used for running worker: {today}')
+        return (
+            QueryWindow(
+                name=name,
+                start=(today - timedelta(days=number_of_days * 2)).strftime('%Y-%m-%d'),
+                end=(today - timedelta(days=number_of_days)).strftime('%Y-%m-%d'),
+                alerts=alerts,
+            ),
+            QueryWindow(
+                name=name,
+                start=(today - timedelta(days=number_of_days)).strftime('%Y-%m-%d'),
+                end=today.strftime('%Y-%m-%d'),
+                alerts=alerts,
+            ),
+        )
+
+    def __fetch_periods(self, periods: list[Periodicity]):
+        diff_window = []
+        for period in periods:
+            if period.period.startswith('L') and period.period.endswith('D'):
+                period_day_count = int(
+                    
period.period.removeprefix('L').removesuffix('D')
+                )
+                diff_window.append(
+                    self.__get_windows(period.period, period_day_count, period.alerts)
+                )
+            else:
+                logger.warning(f'Unknown periodicity found: {period.period}')
+        return diff_window
+
+    def __fetch_parent_projections(self, projections: Projections):
+        projections: list[Projection] = projections.parent
+        projection_queries = [
+            f'{projection.sql} as {projection.metric}' for projection in projections
+        ]
+
+        return ','.join(projection_queries), projections
+
+    def __fetch_child_projections(self, projections: Projections):
+        projections: list[Projection] = projections.children
+        projection_queries = [
+            f'{projection.sql} as {projection.metric}' for projection in projections
+        ]
+        return ','.join(projection_queries), projections
+
+    def __execute_periodic_insights(
+        self, query, projection_query, old_window: QueryWindow, new_window: QueryWindow
+    ):
+        old_window_value: dict = self.repository.fetch_insights(
+            query,
+            projection_query,
+            start_date=old_window.start,
+            end_date=old_window.end,
+        )
+
+        new_window_value: dict = self.repository.fetch_insights(
+            query,
+            projection_query,
+            start_date=new_window.start,
+            end_date=new_window.end,
+        )
+        return old_window_value, new_window_value
+
+    def __periodic_alerts(
+        self,
+        period_name: str,
+        old_window_value: Dict[str, List],
+        new_window_value: Dict[str, List],
+        possible_alerts: List[Threshold],
+    ) -> List[Alert]:
+        alerts_to_notify: List[Alert] = []
+        for alert in possible_alerts:
+            metric = alert.metric
+            threshold = alert.threshold
+
+            old_values = old_window_value.get(metric, [])
+            new_values = new_window_value.get(metric, [])
+
+            if (
+                old_values is None
+                or new_values is None
+                or len(old_values) == 0
+                or len(new_values) == 0
+            ):
+                logger.debug(
+                    f'No value found for metric: {metric}, skipping alert creation'
+                )
+                continue
+
+            if new_values[0] is None or old_values[0] is None:
+                logger.debug(
+                    f'Possibly missing data, old {old_values[0]} and new {new_values[0]}'
+                )
+                continue
+
+            diff_value = new_values[0] - old_values[0]
+            diff_percentage = diff_value / old_values[0] if old_values[0] != 0 else None
+            if self.__check_threshold(threshold, diff_percentage):
+                logger.info(
+                    f'metric: {metric}, threshold: {threshold}, diff_percentage: {diff_percentage}, type: {period_name}'
+                )
+                alerts_to_notify.append(
+                    Alert(
+                        metric=metric,
+                        threshold=threshold,
+                        diff_value=diff_percentage,
+                        previous_value=old_values[0],
+                        current_value=new_values[0],
+                        type=AlertType.resolve(period_name),
+                    )
+                )
+        return alerts_to_notify
+
+    def __check_threshold(self, threshold: float, value: float | None):
+        if value is None:
+            return False
+        return (threshold > 0 and value > threshold) or (
+            threshold < 0 and value < threshold
+        )
+
+    def __goal_line_alerts(self, goal_lines: List[Threshold], new_window_value):
+        alerts = []
+        for line in goal_lines:
+            new_values = new_window_value.get(line.metric, [])
+            if len(new_values) == 0 or new_values[0] is None:
+                logger.debug(
+                    f'No value found for metric: {line.metric}, skipping alert creation'
+                )
+                continue
+            logger.info(
+                f'metric: {line.metric}, threshold: {line.threshold}, value: {new_values[0]}, type: {AlertType.goal_line}'
+            )
+            if (line.threshold > 0 and new_values[0] >= line.threshold) or (
+                line.threshold < 0 and new_values[0] <= line.threshold
+            ):
+                alerts.append(
+                    Alert(
+                        metric=line.metric,
+                        threshold=line.threshold,
+                        current_value=new_values[0],
+                        previous_value=None,
+                        diff_value=None,
+                        type=AlertType.goal_line,
+                    )
+                )
+        return alerts
+
+    # 
TODO remove periodicity_filter and make it part of yaml + def maybe_extract_alerts( + self, insight_query: SignalQuery, periodicity_filter: str = None + ): + periods = self.__fetch_periods(insight_query.periodicity) + projection_query, _ = self.__fetch_parent_projections(insight_query.projections) + + alerts: List[Alert] = [] + new_value_7d = None + for period in periods: + old_window, new_window = period + if periodicity_filter is not None: + if periodicity_filter != new_window.name: + continue + old_value, new_value = self.__execute_periodic_insights( + insight_query.query.sql, projection_query, old_window, new_window + ) + # TODO goal line will only be checked for L7D + if new_window.name == 'L7D': + new_value_7d = new_value + periodic_alerts = self.__periodic_alerts( + period_name=old_window.name, + old_window_value=old_value, + new_window_value=new_value, + possible_alerts=old_window.alerts, + ) + alerts.extend(periodic_alerts) + + goal_line_alerts = [] + if new_value_7d: + goal_line_alerts = self.__goal_line_alerts( + insight_query.goal_lines, new_value_7d + ) + + alerts.extend(goal_line_alerts) + + return ActionableAlerts( + id=insight_query.id, + name=insight_query.name, + title=insight_query.title, + type=insight_query.type, + description=insight_query.description, + alerts=alerts, + ) + + def __safe_fetch_metric(self, results: dict, metric: str): + value = results.get(metric, []) + return value[0] if len(value) > 0 else None + + def extract_raw_inner_query(self, insight_query: SignalQuery): + detailed_period = 'L7D' + if len(insight_query.periodicity) > 0: + detailed_period = insight_query.periodicity[0].period + period_day_count = int(detailed_period.removeprefix('L').removesuffix('D')) + + # picked the first window + _, new_window = self.__get_windows(detailed_period, period_day_count, []) + return self.repository.execute_query( + insight_query.query.sql, + start_date=new_window.start, + end_date=new_window.end, + ) + + def extract_detailed_insights(self, insight_query: SignalQuery): + projection_query, projections = self.__fetch_child_projections( + insight_query.projections + ) + + detailed_period = 'L7D' + if len(insight_query.periodicity) > 0: + detailed_period = insight_query.periodicity[0].period + period_day_count = int(detailed_period.removeprefix('L').removesuffix('D')) + + # picked the first window + old_window, new_window = self.__get_windows( + detailed_period, period_day_count, [] + ) + + results = self.repository.fetch_insights( + insight_query.query.sql, + projection_query, + start_date=new_window.start, + end_date=new_window.end, + ) + + metrices = [ + Metric( + metric=projection.metric, + name=projection.name, + value=self.__safe_fetch_metric( + results=results, metric=projection.metric + ), + ) + for projection in projections + ] + + if insight_query.plots is None or len(insight_query.plots) == 0: + logger.error('The insights query plots seems to be empty') + return DetailedInsights( + metrices=metrices, data_points=None, goal_lines=insight_query.goal_lines + ) + + projections = [p for p in insight_query.plots[0].metrices] + projection_queries = [ + f'{projection.sql} as {projection.metric}' for projection in projections + ] + pr_query = ','.join(projection_queries) + + raw_data_new: Dict[str, List] = self.repository.fetch_raw_values( + insight_query.query.sql, + projection=pr_query, + start_date=new_window.start, + end_date=new_window.end, + ) + + raw_data_old: Dict[str, List] = self.repository.fetch_raw_values( + insight_query.query.sql, + 
projection=pr_query, + start_date=old_window.start, + end_date=old_window.end, + ) + + return DetailedInsights( + metrices=metrices, + data_points=DataPoints( + window_type='L7D', old_window=raw_data_old, new_window=raw_data_new + ), + goal_lines=insight_query.goal_lines, + ) + + def fetch_pvo_records( + self, + odata_query: str | None = None, + params: Dict | None = None, + limit: str | None = None, + offset: str | None = None, + table_name: str = None, + ) -> List: + return self.repository.fetch_pvo_record( + odata_query, + params=params, + limit=limit, + offset=offset, + table_name=table_name, + ) + + def update_pvo_records_by_id( + self, + id: str, + table_name: str, + rls_filter: str, + rls_params: Dict[str, Any], + update_data: Dict[str, Any], + ) -> List: + return self.repository.update_pvo_record( + id=id, + table_name=table_name, + update_data=update_data, + odata_condition=rls_filter, + rls_params=rls_params, + ) diff --git a/wavefront/server/modules/insights_module/insights_module/service/pdo_service.py b/wavefront/server/modules/insights_module/insights_module/service/pdo_service.py new file mode 100644 index 00000000..f7016a1e --- /dev/null +++ b/wavefront/server/modules/insights_module/insights_module/service/pdo_service.py @@ -0,0 +1,168 @@ +from abc import ABC +from abc import abstractmethod +from datetime import datetime +from datetime import timedelta +import json +import re + +import boto3 +from common_module.utils.odata_parser import prepare_odata_filter +from google.cloud import storage +from insights_module.service.insights_service import InsightsService + + +class PdoService(ABC): + @abstractmethod + def fetch_upto_limit( + self, filter: str | None, limit: int, offset: int, table_name: str = None + ): + pass + + @abstractmethod + def patch_record_by_id(self, id: str, table_name: str = None): + pass + + @abstractmethod + def fetch_audio(self): + pass + + @abstractmethod + def fetch_transcript(self): + pass + + +class AWSServices(PdoService): + def __init__( + self, + insights_service: InsightsService, + transcript_bucket_name, + audio_bucket_name, + ): + self._insight_service = insights_service + self._transcript_bucket_name = transcript_bucket_name + self._audio_bucket_name = audio_bucket_name + + def get_bucket_key(self, value: str): + match = re.match(r's3://([^/]+)/(.+)', value) + bucket_name = match.group(1) + key = match.group(2) + return bucket_name, key + + def fetch_upto_limit(self, filter, limit, offset, table_name=None): + odata_filter, params = prepare_odata_filter(filter) + return self._insight_service.fetch_pvo_records( + odata_query=odata_filter, + params=params, + limit=limit, + offset=offset, + table_name=table_name, + ) + + def fetch_audio(self, url): + audio_bucket_name, key = self.get_bucket_key(url) + + s3_client = boto3.client('s3') + presigned_url = s3_client.generate_presigned_url( + 'get_object', + Params={'Bucket': audio_bucket_name, 'Key': key}, + ExpiresIn=1800, + ) + + return presigned_url + + def fetch_transcript(self, url): + transcript_bucket_name, key = self.get_bucket_key(url) + s3_client = boto3.client('s3') + s3_response = s3_client.get_object( + Bucket=transcript_bucket_name, + Key=key, + ) + file_content = s3_response['Body'].read() + transcript_result: dict = json.loads(file_content) + + transcripts = { + 'transcript': transcript_result['transcribe'], + 'translate': transcript_result['translate'], + 'transcribe_diarized': transcript_result.get('transcribe_diarized', None), + 'translated_diarization': transcript_result.get( + 
'translated_diarization', None + ), + 'speaker_mapping': transcript_result.get('speaker_mapping', None), + 'translation_diarization': transcript_result.get( + 'translation_diarization', False + ), + 'diarization': transcript_result.get('diarization', False), + } + return transcripts + + def patch_record_by_id(self, id, table_name=None): + raise NotImplementedError( + 'Patch operation is not implemented for AWS services.' + ) + + +class GCPServices(PdoService): + def __init__( + self, + insights_service: InsightsService, + transcript_bucket_name, + audio_bucket_name, + ): + self._insight_service = insights_service + self._transcript_bucket_name = transcript_bucket_name + self._audio_bucket_name = audio_bucket_name + self.client = storage.Client() + + def get_bucket_key(self, value: str): + match = re.match(r'gs://([^/]+)/(.+)', value) + bucket_name = match.group(1) + key = match.group(2) + return bucket_name, key + + def fetch_upto_limit(self, filter, limit, offset, table_name=None): + odata_filter, params = prepare_odata_filter(filter) + return self._insight_service.fetch_pvo_records( + odata_query=odata_filter, + params=params, + limit=limit, + offset=offset, + table_name=table_name, + ) + + def fetch_audio(self, url): + audio_bucket_name, key = self.get_bucket_key(url) + expiration = timedelta(minutes=30) + + bucket = self.client.bucket(audio_bucket_name) + blob = bucket.blob(key) + + presigned_url = blob.generate_signed_url( + version='v4', expiration=datetime.utcnow() + expiration, method='GET' + ) + + return presigned_url + + def fetch_transcript(self, url): + transcript_bucket_name, key = self.get_bucket_key(url) + + bucket = self.client.bucket(transcript_bucket_name) + blob = bucket.blob(key) + file_content = blob.download_as_bytes() + transcript_result = json.loads(file_content) + + transcripts = { + 'transcript': transcript_result['transcribe'], + 'translate': transcript_result['translate'], + } + + return transcripts + + def patch_record_by_id(self, id, update_data: dict, table_name, rls_filter: str): + odata_filter, params = prepare_odata_filter(rls_filter) + return self._insight_service.update_pvo_records_by_id( + id=id, + table_name=table_name, + rls_filter=odata_filter, + rls_params=params, + update_data=update_data, + ) diff --git a/wavefront/server/modules/insights_module/insights_module/service/usage_metric_service.py b/wavefront/server/modules/insights_module/insights_module/service/usage_metric_service.py new file mode 100644 index 00000000..4421659f --- /dev/null +++ b/wavefront/server/modules/insights_module/insights_module/service/usage_metric_service.py @@ -0,0 +1,12 @@ +from insights_module.repository.pvo_repository import PVORepository + + +class UsageMetricService: + def __init__(self, repository: PVORepository, cloud_provider: str): + self.repository = repository + self.cloud_provider = cloud_provider + + def fetch_usage_metrics(self, start_time: str, end_time: str): + return self.repository.fetch_usage_metrics( + start_time=start_time, end_time=end_time, cloud_provider=self.cloud_provider + ) diff --git a/wavefront/server/modules/insights_module/insights_module/utils/helper.py b/wavefront/server/modules/insights_module/insights_module/utils/helper.py new file mode 100644 index 00000000..85f45a9c --- /dev/null +++ b/wavefront/server/modules/insights_module/insights_module/utils/helper.py @@ -0,0 +1,19 @@ +import collections + + +def fetch_data_filters(data_filters: list) -> str: + group_filter = collections.defaultdict(list) + for data_filter in data_filters: + 
group_filter[data_filter.key].append(data_filter.value)
+
+    additional_filters = []
+    for key, values in group_filter.items():
+        if len(values) == 1:
+            additional_filters.append(f"({key} eq '{values[0]}')")
+        else:
+            or_condition = []
+            for value in values:
+                or_condition.append(f"({key} eq '{value}')")
+            additional_filters.append(f"({' or '.join(or_condition)})")
+
+    return additional_filters
diff --git a/wavefront/server/modules/insights_module/pyproject.toml b/wavefront/server/modules/insights_module/pyproject.toml
new file mode 100644
index 00000000..487a36f6
--- /dev/null
+++ b/wavefront/server/modules/insights_module/pyproject.toml
@@ -0,0 +1,49 @@
+[project]
+name = "insights-module"
+version = "0.0.1"
+description = "Insights Creation Module"
+authors = [
+    { name = "rootflo engineering", email = "engineering@rootflo.ai" }
+]
+requires-python = ">=3.11"
+
+dependencies = [
+    "common-module",
+    "db-repo-module",
+    "pyyaml>=6.0.3,<7",
+    "dependency-injector>=4.42.0,<5.0.0",
+    "redshift-connector>=2.1.5,<3.0.0",
+    "psycopg2>=2.9.10,<3.0.0",
+    "fastapi>=0.115.2,<1.0.0",
+    "dacite>=1.9.2,<2.0.0",
+    "httpx>=0.28.1,<1.0.0",
+    "boto3<=1.38.40",
+    "google-cloud-bigquery==3.34.0",
+    "google-cloud-storage<3.0.0",
+]
+
+[tool.uv.sources]
+common-module = { workspace = true }
+db-repo-module = { workspace = true }
+
+[dependency-groups]
+dev = [
+    "pytest>=8.3.4,<9.0.0",
+    "pytest-asyncio>=0.24.0,<1.0.0",
+    "asyncpg>=0.30.0,<1.0.0",
+    "testing-postgresql>=1.3.0,<2.0.0"
+]
+
+[tool.pytest.ini_options]
+asyncio_mode = "auto"
+asyncio_default_fixture_loop_scope = "function"
+
+[tool.uv]
+package = true
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["insights_module"]
diff --git a/wavefront/server/modules/insights_module/tests/conftest.py b/wavefront/server/modules/insights_module/tests/conftest.py
new file mode 100644
index 00000000..a023079e
--- /dev/null
+++ b/wavefront/server/modules/insights_module/tests/conftest.py
@@ -0,0 +1,239 @@
+import json
+from unittest.mock import Mock
+from uuid import uuid4
+
+from auth_module.auth_container import AuthContainer
+from common_module.common_container import CommonContainer
+from common_module.middleware.request_id_middleware import RequestIdMiddleware
+from db_repo_module.database.base import Base
+from db_repo_module.db_repo_container import DatabaseModuleContainer
+from dependency_injector import providers
+from fastapi import FastAPI
+from fastapi.testclient import TestClient
+from insights_module.controllers.pdo_controller import pdo_router
+from insights_module.insights_container import InsightsContainer
+import pytest
+from sqlalchemy.ext.asyncio import async_sessionmaker
+from sqlalchemy.ext.asyncio import create_async_engine
+import testing.postgresql
+from user_management_module.authorization.require_auth import RequireAuthMiddleware
+from user_management_module.user_container import UserContainer
+
+
+class MockDbClient:
+    def __init__(self, engine, session_factory):
+        self._engine = engine
+        self.session = session_factory
+
+
+@pytest.fixture
+async def test_engine():
+    with testing.postgresql.Postgresql() as postgresql:
+        database_url = postgresql.url()
+
+        async_database_url = database_url.replace(
+            'postgresql://', 'postgresql+psycopg://'
+        )
+
+        engine = create_async_engine(async_database_url)
+
+        async with engine.begin() as conn:
+            await conn.run_sync(Base.metadata.create_all)
+
+        yield engine
+
+        async with engine.begin() as conn:
+            await 
conn.run_sync(Base.metadata.drop_all) + await engine.dispose() + + +@pytest.fixture +async def test_session(test_engine): + async_session = async_sessionmaker(autocommit=False, bind=test_engine) + yield async_session + + +@pytest.fixture +def test_user_id(): + """Fixture to provide a consistent test user ID.""" + return str(uuid4()) + + +@pytest.fixture +def test_session_id(): + """Fixture to provide a consistent test session ID.""" + return str(uuid4()) + + +@pytest.fixture +def setup_containers(test_engine, test_session, test_user_id, test_session_id): + # setting up the dependencies for the requireauth middleware + auth_container = AuthContainer() + common_container = CommonContainer() + user_container = UserContainer() + + db_repo_container = DatabaseModuleContainer() + mock_db_client = MockDbClient(test_engine, test_session) + db_repo_container.db_client.override(mock_db_client) + + # mocking the cache manager + cache_manager_mock = Mock() + cache_manager_mock.get_str.return_value = json.dumps( + {'user_id': test_user_id, 'session_id': test_session_id} + ) + cache_manager_mock.add = Mock() + common_container.cache_manager.override(cache_manager_mock) + + common_container.cache_manager.override(db_repo_container.cache_manager) + insights_container = InsightsContainer( + notification_repository=db_repo_container.notification_repository, + ) + + # Mock connector for PVORepository + mock_connector = Mock() + mock_connector.execute_query.return_value = ([], []) + insights_container.connector.override(providers.Singleton(lambda: mock_connector)) + + # Mock cloud service + mock_cloud_service = Mock() + mock_cloud_service.fetch_audio.return_value = ( + 'https://example.com/audio/test_audio.mp3' + ) + mock_cloud_service.fetch_upto_limit.return_value = [ + { + 'id': 'test_id_1', + 'conversation_id': 'conv_1', + 'created_at': '2024-03-20T10:00:00', + 'rf_transcription_status': 'success', + 'rf_insights_status': 'success', + 'total_duration': 300, + }, + { + 'id': 'test_id_2', + 'conversation_id': 'conv_2', + 'created_at': '2024-03-20T11:00:00', + 'rf_transcription_status': 'success', + 'rf_insights_status': 'success', + 'total_duration': 450, + }, + ] + mock_cloud_service.fetch_transcript.return_value = { + 'transcript': 'This is a test transcript', + 'metadata': {'duration': 300, 'speaker_count': 2}, + } + + insights_container.cloud_service.override( + providers.Singleton(lambda: mock_cloud_service) + ) + + # mocking the token service + mock_token_service = Mock() + mock_token_service.create_token.return_value = 'mock_token' + mock_token_service.decode_token.return_value = { + 'sub': 'test@example.com', + 'user_id': test_user_id, + 'role_id': 'test_role_id', + 'session_id': test_session_id, + } + mock_token_service.token_expiry = 3600 + mock_token_service.temporary_token_expiry = 600 + + # overriding the auth container dependencies + auth_container.token_service.override(mock_token_service) + auth_container.db_client.override(db_repo_container.db_client) + auth_container.cache_manager.override(cache_manager_mock) + + # overriding the user container dependencies + user_container.db_client.override(db_repo_container.db_client) + user_container.cache_manager.override(cache_manager_mock) + + auth_container.wire( + packages=[ + 'insights_module.controllers', + 'user_management_module.authorization', + ] + ) + + user_container.wire( + packages=[ + 'user_management_module.authorization', + 'auth_module.controllers', + 'insights_module.controllers', + ] + ) + common_container.wire( + packages=[ + 
'insights_module.controllers', + 'user_management_module.authorization', + ] + ) + insights_container.wire( + packages=[ + 'insights_module.controllers', + ] + ) + + yield auth_container, common_container, user_container, insights_container + auth_container.unwire() + common_container.unwire() + user_container.unwire() + + +@pytest.fixture +def test_client(setup_containers): + app = FastAPI() + app.add_middleware(RequestIdMiddleware) + app.add_middleware(RequireAuthMiddleware) + app.include_router(pdo_router, prefix='/floware/v1/insights') + return TestClient(app) + + +@pytest.fixture +def auth_token(setup_containers, test_user_id, test_session_id): + auth_container, _, _, _ = setup_containers + token_service = auth_container.token_service() + token = token_service.create_token( + sub='test@example.com', + user_id=test_user_id, + role_id='test_role_id', + session_id=test_session_id, + ) + return token + + +@pytest.fixture +def mocking_pdo_controller_is_admin(monkeypatch): + async def mock_check_admin(role_id): + return True + + monkeypatch.setattr( + 'insights_module.controllers.pdo_controller.check_admin', + mock_check_admin, + ) + + +@pytest.fixture +def mocking_pdo_controller_not_admin(monkeypatch): + async def mock_check_admin(role_id): + return False + + monkeypatch.setattr( + 'insights_module.controllers.pdo_controller.check_admin', + mock_check_admin, + ) + + +@pytest.fixture +def mock_pvo_repository_emptydata(setup_containers): + _, _, _, insights_container = setup_containers + mock_cloud_service = Mock() + mock_cloud_service.fetch_audio.return_value = ( + 'https://example.com/audio/test_audio.mp3' + ) + mock_cloud_service.fetch_upto_limit.return_value = [] + + # Configure the mock to return the dictionary directly using AsyncMock + mock_cloud_service.fetch_transcript = Mock(return_value={}) + insights_container.cloud_service.override( + providers.Singleton(lambda: mock_cloud_service) + ) diff --git a/wavefront/server/modules/insights_module/tests/test_pvo_controller.py b/wavefront/server/modules/insights_module/tests/test_pvo_controller.py new file mode 100644 index 00000000..c26d3e96 --- /dev/null +++ b/wavefront/server/modules/insights_module/tests/test_pvo_controller.py @@ -0,0 +1,275 @@ +from db_repo_module.models.resource import Resource +from db_repo_module.models.resource import ResourceScope +from db_repo_module.models.role import Role +from db_repo_module.models.role_resource import RoleResource +from db_repo_module.models.session import Session +from db_repo_module.models.user import User +from db_repo_module.models.user_role import UserRole +import pytest +from sqlalchemy.ext.asyncio import AsyncSession + + +@pytest.mark.asyncio +async def create_test_resources_and_roles(test_session: AsyncSession, test_user_id): + async with test_session() as session: + # Create and commit role first + role = Role( + id='test_role_id', name='test_role', description='Test role for PDO access' + ) + session.add(role) + await session.commit() + + # Create and commit resource + resource = Resource( + id='test_resource_id', + key='test_resource', + value='test_value', + scope=ResourceScope.DATA, + ) + session.add(resource) + await session.commit() + + # Create role-resource mapping + role_resource = RoleResource( + role_id='test_role_id', resource_id='test_resource_id' + ) + session.add(role_resource) + + # Create user-role mapping + user_role = UserRole(user_id=test_user_id, role_id='test_role_id') + session.add(user_role) + await session.commit() + + +@pytest.mark.asyncio +async def 
create_session(test_session: AsyncSession, test_user_id, test_session_id):
+    user = User(
+        id=test_user_id,
+        email='test@example.com',
+        password='hashed_password',
+        first_name='Test',
+        last_name='User',
+    )
+
+    # Create a session in the database
+    db_session = Session(
+        id=test_session_id, user_id=test_user_id, device_info='test_device'
+    )
+
+    async with test_session() as session:
+        session.add(user)
+        session.add(db_session)
+        await session.commit()
+
+
+@pytest.mark.asyncio
+async def test_get_pvo_records(
+    test_session: AsyncSession,
+    test_user_id,
+    test_session_id,
+    test_client,
+    auth_token,
+    mocking_pdo_controller_is_admin,
+):
+    await create_session(test_session, test_user_id, test_session_id)
+    response = test_client.get(
+        '/floware/v1/insights/parsed_data_object',
+        headers={'Authorization': f'Bearer {auth_token}'},
+    )
+    assert response.status_code == 200
+    assert len(response.json()['data']['records']) == 2
+
+
+@pytest.mark.asyncio
+async def test_get_pvo_record_with_empty_result(
+    test_session: AsyncSession,
+    test_user_id,
+    test_session_id,
+    test_client,
+    auth_token,
+    mocking_pdo_controller_is_admin,
+    mock_pvo_repository_emptydata,
+):
+    await create_session(test_session, test_user_id, test_session_id)
+    response = test_client.get(
+        '/floware/v1/insights/parsed_data_object',
+        headers={'Authorization': f'Bearer {auth_token}'},
+    )
+    assert response.status_code == 200
+    assert len(response.json()['data']['records']) == 0
+
+
+@pytest.mark.asyncio
+async def test_get_pvo_records_without_admin_without_data_filter(
+    test_session: AsyncSession,
+    test_user_id,
+    test_session_id,
+    test_client,
+    auth_token,
+    mocking_pdo_controller_not_admin,
+):
+    await create_session(test_session, test_user_id, test_session_id)
+    response = test_client.get(
+        '/floware/v1/insights/parsed_data_object',
+        headers={'Authorization': f'Bearer {auth_token}'},
+    )
+    assert response.status_code == 403
+
+
+@pytest.mark.asyncio
+async def test_get_pvo_record_without_admin_and_data_filter(
+    test_session: AsyncSession,
+    test_user_id,
+    test_session_id,
+    test_client,
+    auth_token,
+    mocking_pdo_controller_not_admin,
+):
+    await create_session(test_session, test_user_id, test_session_id)
+    await create_test_resources_and_roles(test_session, test_user_id)
+
+    response = test_client.get(
+        '/floware/v1/insights/parsed_data_object',
+        headers={'Authorization': f'Bearer {auth_token}'},
+    )
+    assert response.status_code == 200
+    assert len(response.json()['data']['records']) == 2
+
+
+@pytest.mark.asyncio
+async def test_get_pvo_records_with_pagination(
+    test_session: AsyncSession,
+    test_user_id,
+    test_session_id,
+    test_client,
+    auth_token,
+    mocking_pdo_controller_is_admin,
+):
+    """Test getting PDO records with pagination parameters"""
+    await create_session(test_session, test_user_id, test_session_id)
+    await create_test_resources_and_roles(test_session, test_user_id)
+    response = test_client.get(
+        '/floware/v1/insights/parsed_data_object?limit=1&offset=1',
+        headers={'Authorization': f'Bearer {auth_token}'},
+    )
+    assert response.status_code == 200
+    assert (
+        len(response.json()['data']['records']) == 2
+    )  # the mocked fetch_upto_limit always returns 2 records, regardless of paging
+
+
+@pytest.mark.asyncio
+async def test_get_pvo_records_with_filter(
+    test_session: AsyncSession,
+    test_user_id,
+    test_session_id,
+    test_client,
+    auth_token,
+    mocking_pdo_controller_is_admin,
+):
+    """Test getting PDO records with filter parameter"""
+    await create_session(test_session, test_user_id, test_session_id)
+    await 
create_test_resources_and_roles(test_session, test_user_id) + response = test_client.get( + '/floware/v1/insights/parsed_data_object?$filter=conversation_id eq conv', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 200 + + +@pytest.mark.asyncio +async def test_get_pvo_audio( + test_session: AsyncSession, + test_user_id, + test_session_id, + test_client, + auth_token, + mocking_pdo_controller_is_admin, +): + """Test getting audio URL for a PDO record""" + await create_session(test_session, test_user_id, test_session_id) + response = test_client.get( + '/floware/v1/insights/parsed_data_object/audio?resource_url=test_audio.mp3', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 200 + assert 'audio_url' in response.json()['data'] + assert response.json()['data']['audio_url'] is not None + + +@pytest.mark.asyncio +async def test_get_pvo_audio_without_auth( + test_session: AsyncSession, test_user_id, test_session_id, test_client +): + """Test getting audio URL without authentication""" + await create_session(test_session, test_user_id, test_session_id) + response = test_client.get( + '/floware/v1/insights/parsed_data_object/audio?resource_url=test_audio.mp3' + ) + assert response.status_code == 401 + + +@pytest.mark.asyncio +async def test_get_pvo_audio_without_resource_url( + test_session: AsyncSession, + test_user_id, + test_session_id, + test_client, + auth_token, + mocking_pdo_controller_is_admin, +): + """Test getting audio URL without providing resource_url parameter""" + await create_session(test_session, test_user_id, test_session_id) + response = test_client.get( + '/floware/v1/insights/parsed_data_object/audio', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 422 # FastAPI validation error + + +@pytest.mark.asyncio +async def test_get_pvo_transcript( + test_session: AsyncSession, + test_user_id, + test_session_id, + test_client, + auth_token, + mocking_pdo_controller_is_admin, +): + """Test getting transcript for a PDO record""" + await create_session(test_session, test_user_id, test_session_id) + response = test_client.get( + '/floware/v1/insights/parsed_data_object/transcript?resource_url=test_transcript.json', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 200 + + +@pytest.mark.asyncio +async def test_get_pvo_transcript_without_auth( + test_session: AsyncSession, test_user_id, test_session_id, test_client +): + """Test getting transcript without authentication""" + await create_session(test_session, test_user_id, test_session_id) + response = test_client.get( + '/floware/v1/insights/parsed_data_object/transcript?resource_url=test_transcript.json' + ) + assert response.status_code == 401 + + +@pytest.mark.asyncio +async def test_get_pvo_transcript_without_resource_url( + test_session: AsyncSession, + test_user_id, + test_session_id, + test_client, + auth_token, + mocking_pdo_controller_is_admin, +): + """Test getting transcript without providing resource_url parameter""" + await create_session(test_session, test_user_id, test_session_id) + response = test_client.get( + '/floware/v1/insights/parsed_data_object/transcript', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 422 # FastAPI validation error diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/__init__.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/__init__.py new 
file mode 100644 index 00000000..bd016837 --- /dev/null +++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/__init__.py @@ -0,0 +1,5 @@ +""" +LightRAG Module - A lightweight RAG implementation for document processing and querying. +""" + +__version__ = '0.1.0' diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/__init__.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/knowledge_base_controller.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/knowledge_base_controller.py new file mode 100644 index 00000000..276b1f12 --- /dev/null +++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/knowledge_base_controller.py @@ -0,0 +1,211 @@ +from datetime import datetime +import uuid + +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from db_repo_module.models.knowledge_bases import KnowledgeBase +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import APIRouter +from fastapi import HTTPException +from fastapi import Query +from fastapi import status +from fastapi.params import Depends +from fastapi.responses import JSONResponse +from knowledge_base_module.knowledge_base_container import KnowledgeBaseContainer +from knowledge_base_module.models.knowledge_base_schema import NewKnowledge +from pydantic import BaseModel +from sqlalchemy import Result +from sqlalchemy import select + +knowledge_base_router = APIRouter() + + +class KnowledgeBaseResponse(BaseModel): + """Response model for knowledge base data.""" + + id: uuid.UUID + name: str + description: str + type: str + created_at: datetime + updated_at: datetime + + +@knowledge_base_router.post('/v1/knowledge-bases') +@inject +async def create_knowledge_base( + new_base: NewKnowledge, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + knowledge_base_repository: SQLAlchemyRepository[KnowledgeBase] = Depends( + Provide[KnowledgeBaseContainer.knowledge_base_repository] + ), +) -> JSONResponse: + """Create a new knowledge base.""" + # Check for existing knowledge base + existing_knowledge_base = await knowledge_base_repository.find_one( + name=new_base.name + ) + if existing_knowledge_base: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Knowledge Base with the same name already exists' + ), + ) + + # Create new knowledge base + async with knowledge_base_repository.session() as session: + new_kb = KnowledgeBase( + name=new_base.name, + description=new_base.description, + type=new_base.type, + vector_size=new_base.vector_size, + vector_size_1=new_base.vector_size_1 if new_base.vector_size_1 else None, + ) + session.add(new_kb) + await session.flush() + new_kb_id = new_kb.id + await session.commit() + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Created the knowledge base successfully', + 'knowledge_base_id': str(new_kb_id), + } + ), + ) + + +@knowledge_base_router.get( + '/v1/knowledge-bases/{kb_id}', 
response_model=KnowledgeBaseResponse
+)
+@inject
+async def get_knowledge_bases_id(
+    kb_id: uuid.UUID,
+    knowledge_base_repository: SQLAlchemyRepository[KnowledgeBase] = Depends(
+        Provide[KnowledgeBaseContainer.knowledge_base_repository]
+    ),
+) -> KnowledgeBaseResponse:
+    """Get knowledge base by ID."""
+    fetch_knowledge_base_id = await knowledge_base_repository.find_one(id=kb_id)
+    if not fetch_knowledge_base_id:
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail="Knowledge Base with the mentioned id doesn't exist",
+        )
+
+    return KnowledgeBaseResponse(
+        id=fetch_knowledge_base_id.id,
+        name=fetch_knowledge_base_id.name,
+        description=fetch_knowledge_base_id.description,
+        type=fetch_knowledge_base_id.type,
+        created_at=fetch_knowledge_base_id.created_at,
+        updated_at=fetch_knowledge_base_id.updated_at,
+    )
+
+
+@knowledge_base_router.get('/v1/knowledge-bases')
+@inject
+async def get_knowledge_bases(
+    offset: int = Query(0, ge=0, description='The number of items to skip'),
+    limit: int = Query(
+        10, ge=1, le=100, description='The maximum number of items to return'
+    ),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    knowledge_base_repository: SQLAlchemyRepository[KnowledgeBase] = Depends(
+        Provide[KnowledgeBaseContainer.knowledge_base_repository]
+    ),
+) -> JSONResponse:
+    """Get all knowledge bases with pagination."""
+    async with knowledge_base_repository.session() as session:
+        sql = select(KnowledgeBase).slice(offset, offset + limit)
+        results: Result = await session.execute(sql)
+        resources = results.scalars().all()
+        data = [res.to_dict() for res in resources]
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(data={'resources': data}),
+    )
+
+
+@knowledge_base_router.put('/v1/knowledge-bases/{kb_id}')
+@inject
+async def update_knowledge_bases(
+    kb_id: uuid.UUID,
+    new_base: NewKnowledge,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    knowledge_base_repository: SQLAlchemyRepository[KnowledgeBase] = Depends(
+        Provide[KnowledgeBaseContainer.knowledge_base_repository]
+    ),
+) -> JSONResponse:
+    """Update an existing knowledge base."""
+    existing_kb = await knowledge_base_repository.find_one(id=kb_id)
+    if not existing_kb:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                "Knowledge Base with the given id doesn't exist"
+            ),
+        )
+
+    await knowledge_base_repository.find_one_and_update(
+        {'id': kb_id},
+        name=new_base.name,
+        description=new_base.description,
+        type=new_base.type,
+    )
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {
+                'message': 'Updated the Knowledge Base successfully',
+                'knowledge_base_id': str(kb_id),
+            }
+        ),
+    )
+
+
+@knowledge_base_router.delete('/v1/knowledge-bases/{kb_id}')
+@inject
+async def delete_knowledge_base(
+    kb_id: uuid.UUID,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    knowledge_base_repository: SQLAlchemyRepository[KnowledgeBase] = Depends(
+        Provide[KnowledgeBaseContainer.knowledge_base_repository]
+    ),
+) -> JSONResponse:
+    """Delete a knowledge base."""
+    existing_kb = await knowledge_base_repository.find_one(id=kb_id)
+    if not existing_kb:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                "Knowledge 
Base with the given id doesn't exist" + ), + ) + + await knowledge_base_repository.delete_all(id=kb_id) + + return JSONResponse( + status_code=status.HTTP_204_NO_CONTENT, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Deleted the Knowledge Base successfully', + 'knowledge_base_id': str(kb_id), + } + ), + ) diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/knowledge_base_document_controller.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/knowledge_base_document_controller.py new file mode 100644 index 00000000..f9260db9 --- /dev/null +++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/knowledge_base_document_controller.py @@ -0,0 +1,341 @@ +import asyncio +from datetime import datetime +import os +import re +from typing import Optional +import uuid +import json + +from common_module.common_container import CommonContainer +from common_module.log.logger import logger +from common_module.response_formatter import ResponseFormatter +from db_repo_module.models.knowledge_base_documents import KnowledgeBaseDocuments +from db_repo_module.models.knowledge_base_embeddings import KnowledgeBaseEmbeddings +from db_repo_module.models.knowledge_bases import KnowledgeBase +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import APIRouter +from fastapi import HTTPException +from fastapi import Query +from fastapi import status +from fastapi import UploadFile +from fastapi.params import Depends +from fastapi.responses import JSONResponse +from fastapi import Form +from knowledge_base_module.knowledge_base_container import KnowledgeBaseContainer +from flo_cloud.message_queue import MessageQueueManager +from flo_cloud.cloud_storage import CloudStorageManager +from pydantic import BaseModel +from sqlalchemy import Result +from sqlalchemy import select + +kb_document_router = APIRouter() + + +async def handle_database_error(session, error_msg: str, error) -> JSONResponse: + """Handle database errors and return appropriate response.""" + await session.rollback() + logger.error(f'{error_msg} with error as {str(error)}') + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=ResponseFormatter.buildErrorResponse(str(error)), + ) + + +class KnowledgeBaseDocumentResponse(BaseModel): + """Response model for knowledge base document data.""" + + id: uuid.UUID + knowledge_base_id: uuid.UUID + file_path: str + file_name: str + file_type: str + file_size: str + created_at: datetime + updated_at: datetime + + +@kb_document_router.post('/v1/knowledge-bases/{kb_id}/documents') +@inject +async def upload_document( + kb_id: uuid.UUID, + file: UploadFile, + metadata: str = Form(None), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + knowledge_base_repository: SQLAlchemyRepository[KnowledgeBase] = Depends( + Provide[KnowledgeBaseContainer.knowledge_base_repository] + ), + knowledge_base_documents_repository: SQLAlchemyRepository[ + KnowledgeBaseDocuments + ] = Depends(Provide[KnowledgeBaseContainer.knowledge_base_documents_repository]), + cloud_storage: CloudStorageManager = Depends( + Provide[KnowledgeBaseContainer.cloud_storage] + ), + message_queue: MessageQueueManager = Depends( + Provide[KnowledgeBaseContainer.message_queue] + ), + 
config=Depends(Provide[KnowledgeBaseContainer.config]), +) -> JSONResponse: + """Upload and process a document for a knowledge base.""" + temp_file_path = None + try: + # Validate knowledge base exists + existing_kb = await knowledge_base_repository.find_one(id=kb_id) + if not existing_kb: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Knowledge Base with the given id does not exist' + ), + ) + + # Check for existing document + existing_kb_documents = await knowledge_base_documents_repository.find_one( + id=kb_id + ) + if existing_kb_documents: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Document already exists for this knowledge base' + ), + ) + + # Process file content + file_bytes = await file.read() + doc_id = uuid.uuid4() + filename = file.filename.replace(' ', '_') + filename = re.sub(r'_{2,}', '_', filename) + gcs_file_name = f'kb_{kb_id}/{doc_id}/{filename}' + + # Create document record + # Safely load the JSON string into a dictionary + parsed_metadata = None + if metadata is not None: + parsed_metadata = json.loads(metadata) + + async with knowledge_base_documents_repository.session() as session: + new_kb_document = KnowledgeBaseDocuments( + id=doc_id, + knowledge_base_id=kb_id, + file_path=gcs_file_name, + file_name=file.filename, + file_type=file.content_type.split('/')[1], + file_size=file.size, + metadata_value=parsed_metadata, + ) + + session.add(new_kb_document) + await session.commit() + + # Upload to cloud storage + logger.info(f'The data filename is {gcs_file_name}') + bucket_name = ( + config['gcp']['gcp_asset_storage_bucket'] + if config['cloud_config']['cloud_provider'] == 'gcp' + else config['aws']['aws_asset_storage_bucket'] + ) + await asyncio.to_thread( + cloud_storage.save_small_file, + file_content=file_bytes, + bucket_name=bucket_name, + key=gcs_file_name, + content_type=file.content_type, + ) + logger.info(f'File uploaded to cloud storage: {gcs_file_name}') + try: + data = { + 'bucket': bucket_name, + 'name': gcs_file_name, + 'kb_id': str(kb_id), + 'doc_id': str(doc_id), + 'file_type': file.content_type, + 'parse_type': 'kb_insertion', + } + topic_id = ( + config['gcp']['email_topic_id'] + if config['cloud_config']['cloud_provider'] == 'gcp' + else config['aws']['queue_url'] + ) + message_id = message_queue.add_message( + message_body=data, topic_name_or_queue_url=topic_id + ) + logger.info(f'The subscription message is {message_id}') + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Created the knowledge base documents and embeddings successfully', + 'knowledge_base_id': str(kb_id), + } + ), + ) + except Exception as err: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Error while pushing the documents to auraflo as {err}' + ), + ) + + except Exception as e: + logger.error(f'Error while processing document: {str(e)}') + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) + ) + finally: + if temp_file_path and os.path.exists(temp_file_path): + os.unlink(temp_file_path) + + +@kb_document_router.get('/v1/knowledge-bases/{kb_id}/documents') +@inject +async def get_documents( + kb_id: uuid.UUID, + file_type: Optional[str] = Query(None, description='Type of file to filter by'), + offset: int = Query(0, ge=0, description='The number 
+
+
+@kb_document_router.get('/v1/knowledge-bases/{kb_id}/documents')
+@inject
+async def get_documents(
+    kb_id: uuid.UUID,
+    file_type: Optional[str] = Query(None, description='Type of file to filter by'),
+    offset: int = Query(0, ge=0, description='The number of items to skip'),
+    limit: int = Query(
+        10, ge=1, le=100, description='The maximum number of items to return'
+    ),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    knowledge_base_documents_repository: SQLAlchemyRepository[
+        KnowledgeBaseDocuments
+    ] = Depends(Provide[KnowledgeBaseContainer.knowledge_base_documents_repository]),
+) -> JSONResponse:
+    """Get documents for a knowledge base with optional filtering and pagination."""
+    # Return early when the knowledge base has no documents
+    existing_document = await knowledge_base_documents_repository.find_one(
+        knowledge_base_id=kb_id
+    )
+    if not existing_document:
+        return JSONResponse(
+            status_code=status.HTTP_200_OK,
+            content=response_formatter.buildSuccessResponse(data={'resources': []}),
+        )
+
+    # Fetch documents
+    async with knowledge_base_documents_repository.session() as session:
+        query = select(KnowledgeBaseDocuments).where(
+            KnowledgeBaseDocuments.knowledge_base_id == kb_id
+        )
+
+        if file_type:
+            query = query.where(KnowledgeBaseDocuments.file_type == file_type)
+
+        # offset/limit pagination; slice(offset, limit) would treat `limit` as
+        # an end index and return nothing once offset >= limit
+        query = query.offset(offset).limit(limit)
+
+        results: Result = await session.execute(query)
+        resources = results.scalars().all()
+        data = [res.to_dict() for res in resources]
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(data={'resources': data}),
+    )
+
+
+@kb_document_router.delete('/v1/knowledge-bases/{kb_id}/documents/{document_id}')
+@inject
+async def delete_documents(
+    kb_id: uuid.UUID,
+    document_id: uuid.UUID,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    knowledge_base_documents_repository: SQLAlchemyRepository[
+        KnowledgeBaseDocuments
+    ] = Depends(Provide[KnowledgeBaseContainer.knowledge_base_documents_repository]),
+    knowledge_base_embeddings_repository: SQLAlchemyRepository[
+        KnowledgeBaseEmbeddings
+    ] = Depends(Provide[KnowledgeBaseContainer.knowledge_base_embeddings_repository]),
+) -> JSONResponse:
+    """Delete a document and its associated embeddings from a knowledge base."""
+    # Validate document exists
+    existing_document = await knowledge_base_documents_repository.find_one(
+        id=document_id, knowledge_base_id=kb_id
+    )
+    if not existing_document:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                'Document not found for this knowledge base'
+            ),
+        )
+
+    # Delete document and embeddings
+    await knowledge_base_documents_repository.delete_all(
+        id=document_id, knowledge_base_id=kb_id
+    )
+    await knowledge_base_embeddings_repository.delete_all(document_id=document_id)
+
+    # 200 rather than 204: a 204 response must not carry a body
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {
+                'message': 'Deleted the Knowledge Base Documents and embeddings records successfully',
+                'knowledge_base_id': str(kb_id),
+            }
+        ),
+    )
+
+
+@kb_document_router.get('/v1/knowledge-bases/{kb_id}/document/{document_id}')
+@inject
+async def get_document_with_id(
+    kb_id: uuid.UUID,
+    document_id: uuid.UUID,
+    signed_url: Optional[bool] = False,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    knowledge_base_documents_repository: SQLAlchemyRepository[
+        KnowledgeBaseDocuments
+    ] = Depends(Provide[KnowledgeBaseContainer.knowledge_base_documents_repository]),
+    config: dict = Depends(Provide[KnowledgeBaseContainer.config]),
+    cloud_storage_manager: CloudStorageManager = Depends(
+        Provide[KnowledgeBaseContainer.cloud_storage_manager]
+    ),
+) -> JSONResponse:
+    """Get a document for a knowledge base by id, optionally returning a signed URL."""
+    # Validate document exists
+    existing_document = await knowledge_base_documents_repository.find_one(
+        id=document_id, knowledge_base_id=kb_id
+    )
+    if not existing_document:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                'Document not found for this knowledge base'
+            ),
+        )
+    if signed_url:
+        provider = config['cloud_config']['cloud_provider']
+        bucket = (
+            config['gcp']['gcp_asset_storage_bucket']
+            if provider.lower() == 'gcp'
+            else config['aws']['aws_asset_storage_bucket']
+        )
+        presigned_url = cloud_storage_manager.generate_presigned_url(
+            bucket, existing_document.file_path, 'GET'
+        )
+        response_data = existing_document.to_dict()
+        response_data['signed_url'] = presigned_url
+        return JSONResponse(
+            status_code=status.HTTP_200_OK,
+            content=response_formatter.buildSuccessResponse(
+                data={'resources': response_data}
+            ),
+        )
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            data={'resources': existing_document.to_dict()}
+        ),
+    )
diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/rag_retreival_controller.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/rag_retreival_controller.py
new file mode 100644
index 00000000..ce88782f
--- /dev/null
+++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/rag_retreival_controller.py
@@ -0,0 +1,548 @@
+from typing import List, Optional
+import uuid
+
+from common_module.common_container import CommonContainer
+from common_module.response_formatter import ResponseFormatter
+from db_repo_module.models.kb_inferences import KnowledgeBaseInferences
+from db_repo_module.models.knowledge_base_embeddings import KnowledgeBaseEmbeddings
+from db_repo_module.models.llm_inference_config import LlmInferenceConfig
+from db_repo_module.models.knowledge_bases import KnowledgeBase
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from dependency_injector.wiring import inject
+from dependency_injector.wiring import Provide
+from fastapi import APIRouter
+from fastapi import Query
+from fastapi import status
+from fastapi.params import Depends
+from fastapi.responses import JSONResponse
+from knowledge_base_module.knowledge_base_container import KnowledgeBaseContainer
+from knowledge_base_module.models.knowledge_base_schema import (
+    NewInference,
+)
+from knowledge_base_module.services.kb_rag_retrieve import KBRagResponse
+from knowledge_base_module.services.image_rag_retrieve import ImageRagRetrieve
+from pydantic import BaseModel, Field
+from datetime import datetime
+from sqlalchemy import Result
+from sqlalchemy import select
+
+rag_retrieval_router = APIRouter()
+
+
+class KnowledgeInferenceResponse(BaseModel):
+    """Response model for knowledge base data."""
+
+    inference_id: uuid.UUID
+    knowledge_base_id: uuid.UUID
+    inference_content: dict
+    created_at: datetime
+    updated_at: datetime
+
+
+class RetrieveSchema(BaseModel):
+    """Request model for the retrieve endpoint."""
+
+    embedding: Optional[List[float]] = None
+    query: str
+    kb_id: uuid.UUID
+    threshold: Optional[float] = None
+    top_k: Optional[int] = None
+    vector_weight: Optional[float] = None
+    keyword_weight: Optional[float] = None
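+
+
+# Illustrative request body for the /v1/retrieve endpoint defined below
+# (values are hypothetical; the weights express the vector/keyword blend):
+#
+#     {
+#         "query": "warranty period for model X",
+#         "kb_id": "7f9f3c2e-0000-0000-0000-000000000000",
+#         "threshold": 0.2,
+#         "top_k": 5,
+#         "vector_weight": 0.7,
+#         "keyword_weight": 0.3
+#     }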
+
+
+class EmbeddingSchema(BaseModel):
+    """Request model for embedding vectors."""
+
+    embedding_vector: List[List[float]]
+    embedding_vector_1: Optional[List[List[float]]] = Field(
+        default_factory=lambda: [[]]
+    )
+    document_id: uuid.UUID
+    kb_id: uuid.UUID
+    chunk_text: List[str]
+    chunk_index: List[str]
+
+
+class DocWiseEmbeddingSchema(BaseModel):
+    """Request model for document-wise embeddings."""
+
+    embeddings: List[EmbeddingSchema]
+
+
+class ImagePayload(BaseModel):
+    """Payload for Image embedding."""
+
+    image_data: Optional[str] = None
+
+
+class DocumentPayload(BaseModel):
+    """Payload for Document embedding."""
+
+    inference_id: uuid.UUID
+    query: Optional[str] = None
+    model: Optional[str] = None
+
+
+def convert_uuids_to_str(data):
+    """Recursively converts UUID objects in a dictionary or list to strings."""
+    if isinstance(data, dict):
+        return {key: convert_uuids_to_str(value) for key, value in data.items()}
+    elif isinstance(data, list):
+        return [convert_uuids_to_str(element) for element in data]
+    elif isinstance(data, uuid.UUID):
+        return str(data)
+    else:
+        return data
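+
+
+# For example (hypothetical values):
+#     convert_uuids_to_str({'id': uuid.UUID(int=1), 'docs': [uuid.UUID(int=2)]})
+#     -> {'id': '00000000-0000-0000-0000-000000000001',
+#         'docs': ['00000000-0000-0000-0000-000000000002']}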
+
+
+@rag_retrieval_router.post('/v1/knowledge-base/{kb_id}/retrieve')
+@inject
+async def retrieve_query(
+    kb_id: uuid.UUID,
+    query: Optional[str] = None,
+    payload: Optional[ImagePayload] = None,
+    threshold: Optional[float] = Query(None, description='Cosine similarity threshold'),
+    top_k: Optional[int] = Query(None, description='Number of results to return'),
+    vector_weight: Optional[float] = Query(
+        None, description='Weight for vector similarity score'
+    ),
+    keyword_weight: Optional[float] = Query(
+        None, description='Weight for keyword similarity score'
+    ),
+    offset: Optional[int] = Query(None, description='Number of results to skip'),
+    limit: Optional[int] = Query(
+        None, description='Number of results to return (overrides top_k)'
+    ),
+    query_filter: str | None = Query(None, alias='$filter'),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    knowledge_base_repository: SQLAlchemyRepository[KnowledgeBase] = Depends(
+        Provide[KnowledgeBaseContainer.knowledge_base_repository]
+    ),
+    rag_retrieval: KBRagResponse = Depends(
+        Provide[KnowledgeBaseContainer.knowledge_base_retrieve]
+    ),
+    image_rag_retrieval: ImageRagRetrieve = Depends(
+        Provide[KnowledgeBaseContainer.image_knowledge_base_retrieve]
+    ),
+    config: dict = Depends(Provide[KnowledgeBaseContainer.config]),
+):
+    if not query and not payload:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                'Query or Image data should not be empty'
+            ),
+        )
+    existing_kb = await knowledge_base_repository.find_one(id=kb_id)
+    if not existing_kb:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                "Knowledge Base with the mentioned id doesn't exist"
+            ),
+        )
+    if query:
+        retrieved_docs = await rag_retrieval.retrieve_documents(
+            query,
+            kb_id,
+            threshold,
+            vector_weight,
+            keyword_weight,
+            query_filter,
+            offset,
+            limit,
+        )
+    else:
+        inference_url = config['model']['inference_service_url']
+        retrieved_docs = await image_rag_retrieval.retrieve_images(
+            payload.image_data,
+            inference_url,
+            kb_id,
+            threshold,
+            top_k,
+            query_filter,
+            offset,
+            limit,
+        )
+    retrieved_docs = convert_uuids_to_str(retrieved_docs)
+    if not retrieved_docs:
+        return JSONResponse(
+            status_code=status.HTTP_200_OK,
+            content=response_formatter.buildSuccessResponse(data={'documents': []}),
+        )
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            data={'documents': retrieved_docs}
+        ),
+    )
+
+
+@rag_retrieval_router.post('/v1/knowledge-base/{kb_id}/augment/{inference_id}')
+@inject
+async def rag_response(
+    kb_id: uuid.UUID,
+    inference_id: uuid.UUID,
+    query: Optional[str] = Query(None, description='RAG query to be passed'),
+    model: Optional[str] = Query(None, description='model name to be passed'),
+    threshold: Optional[float] = Query(None, description='Cosine similarity threshold'),
+    vector_weight: Optional[float] = Query(
+        None, description='Weight for vector similarity score'
+    ),
+    keyword_weight: Optional[float] = Query(
+        None, description='Weight for keyword similarity score'
+    ),
+    offset: Optional[int] = Query(None, description='Number of results to skip'),
+    limit: Optional[int] = Query(
+        None, description='Number of results to return (overrides top_k)'
+    ),
+    query_filter: str | None = Query(None, alias='$filter'),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    knowledge_base_repository: SQLAlchemyRepository[KnowledgeBase] = Depends(
+        Provide[KnowledgeBaseContainer.knowledge_base_repository]
+    ),
+    rag_retrieval: KBRagResponse = Depends(
+        Provide[KnowledgeBaseContainer.knowledge_base_retrieve]
+    ),
+    kb_inference_repository: SQLAlchemyRepository[KnowledgeBaseInferences] = Depends(
+        Provide[KnowledgeBaseContainer.kb_inference_repository]
+    ),
+):
+    existing_kb = await knowledge_base_repository.find_one(id=kb_id)
+    if not existing_kb:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                "Knowledge Base with the mentioned id doesn't exist"
+            ),
+        )
+    existing_inference = await kb_inference_repository.find_one(
+        knowledge_base_id=kb_id, inference_id=inference_id
+    )
+    if not existing_inference:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                "Knowledge Base inference with the mentioned knowledge_base_id and inference_id doesn't exist"
+            ),
+        )
+    # Validate query is provided
+    if not query:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                'Query must be provided either in request body or as query parameter'
+            ),
+        )
+
+    # Fetch the LLM config referenced by the inference record
+    llm_config = None
+    llm_inference_config_id = existing_inference.config_id
+    async with kb_inference_repository.session() as session:
+        statement = (
+            select(LlmInferenceConfig)
+            .join(
+                KnowledgeBaseInferences,
+                LlmInferenceConfig.id == KnowledgeBaseInferences.config_id,
+            )
+            .where(LlmInferenceConfig.id == llm_inference_config_id)
+        )
+        result: Result = await session.execute(statement)
+        llm_config_result = result.scalars().first()
+        llm_config_dict = (
+            llm_config_result.to_dict(exclude_api_key=False)
+            if llm_config_result
+            else None
+        )
+
+    if not llm_config_dict:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                f'LLM inference configuration not found: {llm_inference_config_id}'
+            ),
+        )
+    else:
+        llm_config = LlmInferenceConfig(**llm_config_dict)
+
+    prompt = existing_inference.inference_content
+    response = await rag_retrieval.query(
+        query,
+        kb_id,
+        prompt,
+        threshold,
+        vector_weight,
+        keyword_weight,
+        model,
+        query_filter,
+        offset,
+        limit,
+        llm_config,
+    )
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(data={'response': response}),
+    )
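+
+
+# The stored inference prompt is applied with str.format(content_data=...) in
+# KBRagResponse.query, so prompts created via the endpoint below should
+# contain a {content_data} placeholder, e.g. (illustrative):
+#
+#     "Answer strictly from the context below.\n\nContext:\n{content_data}"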
+
+
+@rag_retrieval_router.post(
+    '/v1/knowledge-base/{kb_id}/llm_config/{config_id}/inference'
+)
+@inject
+async def create_system_prompt(
+    kb_id: uuid.UUID,
+    config_id: uuid.UUID,
+    inference: NewInference,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    knowledge_base_repository: SQLAlchemyRepository[KnowledgeBase] = Depends(
+        Provide[KnowledgeBaseContainer.knowledge_base_repository]
+    ),
+    kb_inference_repository: SQLAlchemyRepository[KnowledgeBaseInferences] = Depends(
+        Provide[KnowledgeBaseContainer.kb_inference_repository]
+    ),
+    llm_config_repository: SQLAlchemyRepository[LlmInferenceConfig] = Depends(
+        Provide[KnowledgeBaseContainer.llm_config_repository]
+    ),
+):
+    existing_kb = await knowledge_base_repository.find_one(id=kb_id)
+    if not existing_kb:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                "Knowledge Base with the mentioned id doesn't exist"
+            ),
+        )
+    llm_config = await llm_config_repository.find_one(id=config_id)
+    if not llm_config:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                'LLM config id is not present in the llm config table'
+            ),
+        )
+    async with kb_inference_repository.session() as session:
+        new_inference = KnowledgeBaseInferences(
+            knowledge_base_id=kb_id,
+            inference_content=inference.prompt,
+            config_id=config_id,
+        )
+        session.add(new_inference)
+        await session.flush()
+        new_inference_id = new_inference.inference_id
+        await session.commit()
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {
+                'message': 'Created the knowledge base inference successfully',
+                'inference_id': str(new_inference_id),
+            }
+        ),
+    )
+
+
+@rag_retrieval_router.get('/v1/knowledge-base/{kb_id}/inference')
+@inject
+async def get_system_prompt(
+    kb_id: uuid.UUID,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    kb_inference_repository: SQLAlchemyRepository[KnowledgeBaseInferences] = Depends(
+        Provide[KnowledgeBaseContainer.kb_inference_repository]
+    ),
+):
+    existing_inference = await kb_inference_repository.find_one(knowledge_base_id=kb_id)
+    if not existing_inference:
+        return JSONResponse(
+            status_code=status.HTTP_200_OK,
+            content=response_formatter.buildSuccessResponse(data={'resources': []}),
+        )
+
+    async with kb_inference_repository.session() as session:
+        query = select(KnowledgeBaseInferences).where(
+            KnowledgeBaseInferences.knowledge_base_id == kb_id
+        )
+
+        results: Result = await session.execute(query)
+        resources = results.scalars().all()
+        data = [res.to_dict() for res in resources]
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(data={'resources': data}),
+    )
+
+
+@rag_retrieval_router.delete('/v1/knowledge-base/{kb_id}/inference/{inference_id}')
+@inject
+async def delete_system_prompt(
+    kb_id: uuid.UUID,
+    inference_id: uuid.UUID,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    kb_inference_repository: SQLAlchemyRepository[KnowledgeBaseInferences] = Depends(
+        Provide[KnowledgeBaseContainer.kb_inference_repository]
+    ),
+):
+    existing_inference = await kb_inference_repository.find_one(
+        knowledge_base_id=kb_id, inference_id=inference_id
+    )
+    if not existing_inference:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                'Inference Id is not present in the knowledge base inference'
+            ),
+        )
+
+    # Delete the inference record
+    await kb_inference_repository.delete_all(
+        inference_id=inference_id, knowledge_base_id=kb_id
+    )
+
+    # 200 rather than 204: a 204 response must not carry a body
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {
+                'message': 'Deleted the inference successfully',
+                'inference_id': str(inference_id),
+            }
+        ),
+    )
+
+
+@rag_retrieval_router.post('/v1/store_embedding')
+@inject
+async def store_embeddings(
+    payload: DocWiseEmbeddingSchema,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    knowledge_base_repository: SQLAlchemyRepository[KnowledgeBase] = Depends(
+        Provide[KnowledgeBaseContainer.knowledge_base_repository]
+    ),
+    knowledge_base_embeddings_repository: SQLAlchemyRepository[
+        KnowledgeBaseEmbeddings
+    ] = Depends(Provide[KnowledgeBaseContainer.knowledge_base_embeddings_repository]),
+) -> JSONResponse:
+    embeddings_table = []
+    for embedding in payload.embeddings:
+        existing_kb = await knowledge_base_repository.find_one(id=embedding.kb_id)
+        if not existing_kb:
+            return JSONResponse(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                content=response_formatter.buildErrorResponse(
+                    'There is no knowledge base with the given id'
+                ),
+            )
+        vector_size = existing_kb.vector_size
+        vector_size_1 = existing_kb.vector_size_1
+        if len(embedding.embedding_vector[0]) != vector_size or (
+            vector_size_1 and len(embedding.embedding_vector_1[0]) != vector_size_1
+        ):
+            return JSONResponse(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                content=response_formatter.buildErrorResponse(
+                    "The vector size on the embedding doesn't match the required embedding vector size"
+                ),
+            )
+
+        kb_embeddings = [
+            KnowledgeBaseEmbeddings(
+                document_id=embedding.document_id,
+                embedding_vector=embedding.embedding_vector[index],
+                # Guard the index: embedding_vector_1 may be shorter than
+                # embedding_vector (its default is a single empty list)
+                embedding_vector_1=(
+                    embedding.embedding_vector_1[index]
+                    if index < len(embedding.embedding_vector_1)
+                    and embedding.embedding_vector_1[index]
+                    else None
+                ),
+                chunk_text=embedding.chunk_text[index],
+                chunk_index=int(embedding.chunk_index[index].split('_')[1]),
+            )
+            for index in range(len(embedding.embedding_vector))
+        ]
+
+        embeddings_table.extend(kb_embeddings)
+
+    async with knowledge_base_embeddings_repository.session() as session:
+        session.add_all(embeddings_table)
+        await session.commit()
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {
+                'message': 'Created the knowledge base documents and embeddings successfully',
+            }
+        ),
+    )
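+
+
+# The chunk_index entries above are expected to look like 'chunk_0',
+# 'chunk_1', ... — int(chunk_index.split('_')[1]) recovers the ordinal.
+# A minimal (hypothetical) payload for one document with two chunks:
+#
+#     {
+#         "embeddings": [{
+#             "document_id": "...", "kb_id": "...",
+#             "embedding_vector": [[0.1, ...], [0.2, ...]],
+#             "chunk_text": ["first chunk", "second chunk"],
+#             "chunk_index": ["chunk_0", "chunk_1"]
+#         }]
+#     }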
+
+
+@rag_retrieval_router.post('/v1/retrieve')
+@inject
+async def retrieve_record(
+    payload: RetrieveSchema,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    knowledge_base_repository: SQLAlchemyRepository[KnowledgeBase] = Depends(
+        Provide[KnowledgeBaseContainer.knowledge_base_repository]
+    ),
+    rag_retrieval: KBRagResponse = Depends(
+        Provide[KnowledgeBaseContainer.knowledge_base_retrieve]
+    ),
+):
+    existing_kb = await knowledge_base_repository.find_one(id=payload.kb_id)
+    if not existing_kb:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                "Knowledge Base with the mentioned id doesn't exist"
+            ),
+        )
+    if not payload.embedding:
+        # retrieve_documents takes no top_k argument; passing payload.top_k
+        # here would shift the weight arguments into the wrong positions
+        retrieved_docs = await rag_retrieval.retrieve_documents(
+            payload.query,
+            payload.kb_id,
+            payload.threshold,
+            payload.vector_weight,
+            payload.keyword_weight,
+        )
+    else:
+        params = {
+            'threshold': payload.threshold or 0.2,
+            'top_k': payload.top_k or 5,
+            'vector_weight': payload.vector_weight or 0.7,
+            'keyword_weight': payload.keyword_weight or 0.3,
+            'kb_id': payload.kb_id,
+        }
+        # Wrap the embedding so query_embeddings[0] is the full vector
+        # (matching generate_chunk_embeddings), and pass an empty filter
+        retrieved_docs = await rag_retrieval.combined_search_with_reranking(
+            payload.query, [payload.embedding], params, ''
+        )
+    for doc in retrieved_docs:
+        for key, value in doc.items():
+            if isinstance(value, uuid.UUID):
+                doc[key] = str(value)
+    if not retrieved_docs:
+        return JSONResponse(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            content=response_formatter.buildErrorResponse(
+                f'No matching documents exist for the query {payload.query}'
+            ),
+        )
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            data={'documents': retrieved_docs}
+        ),
+    )
diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/__init__.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/embed.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/embed.py
new file mode 100644
index 00000000..13d4674d
--- /dev/null
+++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/embed.py
@@ -0,0 +1,53 @@
+import requests
+from typing import List
+from dataclasses import dataclass
+
+
+@dataclass
+class KnowledgeBaseEmbeddingObject:
+    embedding_vector: List[float]
+    chunk_text: str
+    chunk_index: str
+
+
+class EmbeddingFunc:
+    def __init__(self, embedding_url):
+        self.max_batch_size = 32
+        self.bgm_url = f'{embedding_url}/v1/embeddings'
+
+    def generate_document_embeddings(self, chunks):
+        contents = [v['content'] for v in chunks.values()]
+        batches = [
+            contents[i : i + self.max_batch_size]
+            for i in range(0, len(contents), self.max_batch_size)
+        ]
+
+        embeddings = [self.bgm_embedding(batch) for batch in batches]
+        # Flatten the per-batch lists into one embedding per chunk
+        flat_embeddings = [item for sublist in embeddings for item in sublist]
+
+        data_list = []
+        for i, (k, v) in enumerate(chunks.items()):
+            data_list.append(
+                KnowledgeBaseEmbeddingObject(
+                    # Assign the i-th embedding, not the whole list
+                    embedding_vector=flat_embeddings[i],
+                    chunk_text=v['content'],
+                    chunk_index=k,
+                )
+            )
+        return data_list, flat_embeddings
+
+    def generate_chunk_embeddings(self, chunks):
+        return self.bgm_embedding(chunks)
+
+    def bgm_embedding(self, texts):
+        response = requests.post(
+            self.bgm_url,
+            json={
+                'model': 'BAAI/bge-m3',
+                'input': texts,
+                'encoding_format': 'float',
+            },
+        )
+        # The endpoint returns one embedding per input; collect them all
+        # rather than only the first
+        return [item['embedding'] for item in response.json()['data']]
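+
+# Sketch of how EmbeddingFunc is consumed (hypothetical URL); assumes the
+# embedding service exposes an OpenAI-style /v1/embeddings response of the
+# form {"data": [{"embedding": [...]}, ...]}:
+#
+#     embedder = EmbeddingFunc('http://embeddings.internal:8080')
+#     [query_vector] = embedder.generate_chunk_embeddings(['what is wavefront?'])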
diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/llm.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/llm.py
new file mode 100644
index 00000000..dfaa6e56
--- /dev/null
+++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/embeddings/llm.py
@@ -0,0 +1,55 @@
+from typing import Any, Optional
+from flo_ai.helpers.llm_factory import LLMFactory
+from flo_ai.llm import Gemini
+from db_repo_module.models.llm_inference_config import LlmInferenceConfig
+
+
+class LLMModelFunc:
+    def _create_llm_instance(self, config: LlmInferenceConfig):
+        """
+        Create LLM instance based on configuration
+
+        Args:
+            config: LLM inference configuration
+
+        Returns:
+            LLM instance
+        """
+        return LLMFactory.create_llm({'provider': 'rootflo', 'model_id': config.id})
+
+    async def generate_response(
+        self,
+        query,
+        sys_prompt,
+        model,
+        llm_config: Optional[LlmInferenceConfig] = None,
+    ):
+        """
+        Generate LLM response
+
+        Args:
+            query: User query
+            sys_prompt: System prompt
+            model: Model name (used if llm_config not provided)
+            llm_config: Optional LLM inference configuration
+
+        Returns:
+            Generated response content
+        """
+        messages: list[dict[str, Any]] = []
+        if sys_prompt:
+            messages.append({'role': 'system', 'content': sys_prompt})
+        messages.append({'role': 'user', 'content': query})
+
+        # Use config-based LLM if provided, otherwise fall back to default Gemini
+        if llm_config:
+            llm = self._create_llm_instance(llm_config)
+        else:
+            llm = Gemini(
+                model=model,
+                temperature=0.7,
+            )
+
+        response = await llm.generate(messages)
+        return llm.get_message_content(response)
diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/knowledge_base_container.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/knowledge_base_container.py
new file mode 100644
index 00000000..35588628
--- /dev/null
+++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/knowledge_base_container.py
@@ -0,0 +1,85 @@
+from db_repo_module.models.kb_inferences import KnowledgeBaseInferences
+from db_repo_module.models.knowledge_base_documents import KnowledgeBaseDocuments
+from db_repo_module.models.knowledge_base_embeddings import KnowledgeBaseEmbeddings
+from db_repo_module.models.knowledge_bases import KnowledgeBase
+from db_repo_module.models.llm_inference_config import LlmInferenceConfig
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from dependency_injector import containers
+from dependency_injector import providers
+from knowledge_base_module.services.kb_rag_retrieve import KBRagResponse
+from knowledge_base_module.services.kb_rag_storage import KBRagStorage
+from flo_cloud.message_queue import MessageQueueManager
+from flo_cloud.cloud_storage import CloudStorageManager
+from knowledge_base_module.services.image_rag_retrieve import ImageRagRetrieve
+
+
+class KnowledgeBaseContainer(containers.DeclarativeContainer):
+    config = providers.Configuration(ini_files=['config.ini'])
+    db_client = providers.Dependency()
+    cache_manager = providers.Dependency()
+
+    knowledge_base_repository = providers.Singleton(
+        SQLAlchemyRepository[KnowledgeBase],
+        model=KnowledgeBase,
+        db_client=db_client,
+    )
+
+    knowledge_base_documents_repository = providers.Singleton(
+        SQLAlchemyRepository[KnowledgeBaseDocuments],
+        model=KnowledgeBaseDocuments,
+        db_client=db_client,
+    )
+
+    knowledge_base_embeddings_repository = providers.Singleton(
+        SQLAlchemyRepository[KnowledgeBaseEmbeddings],
+        model=KnowledgeBaseEmbeddings,
+        db_client=db_client,
+    )
+
+    llm_config_repository = providers.Singleton(
+        SQLAlchemyRepository[LlmInferenceConfig],
+        model=LlmInferenceConfig,
+        db_client=db_client,
+    )
+
+    email_rag_service = providers.Factory(
+        KBRagStorage, embedding_url=config.embedding_url.embedding_service_url
+    )
+
+    knowledge_base = providers.Singleton(KnowledgeBase)
+
+    knowledge_base_retrieve = providers.Singleton(
+        KBRagResponse,
+        knowledge_base_documents_repository,
+        knowledge_base_embeddings_repository,
+        embedding_url=config.embedding_url.embedding_service_url,
+    )
+
+    kb_inference_repository = providers.Singleton(
+        SQLAlchemyRepository[KnowledgeBaseInferences],
+        model=KnowledgeBaseInferences,
+        db_client=db_client,
+    )
+
+    cloud_storage = providers.Singleton(
+        CloudStorageManager, provider=config.cloud_config.cloud_provider
+    )
+
+    message_queue = providers.Singleton(
+        MessageQueueManager, cloud_provider=config.cloud_config.cloud_provider
+    )
+
+    image_knowledge_base_retrieve = providers.Singleton(
+        ImageRagRetrieve,
+        knowledge_base_embeddings_repository,
+    )
+
+    cloud_storage_manager = providers.Singleton(
+        CloudStorageManager, provider=config.cloud_config.cloud_provider
+    )
diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/models/__init__.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/models/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/models/knowledge_base_schema.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/models/knowledge_base_schema.py
new file mode 100644
index 00000000..abcdb207
--- /dev/null
+++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/models/knowledge_base_schema.py
@@ -0,0 +1,14 @@
+from pydantic import BaseModel
+from typing import Optional
+
+
+class NewKnowledge(BaseModel):
+    name: str
+    description: str
+    type: str
+    vector_size: int
+    vector_size_1: Optional[int] = None
+
+
+class NewInference(BaseModel):
+    prompt: str
diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/queries/__init__.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/queries/__init__.py
new file mode 100644
index 00000000..af077a6f
--- /dev/null
+++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/queries/__init__.py
@@ -0,0 +1,7 @@
+"""
+SQL queries package for knowledge base module.
+"""
+
+from .generate_query import QueryGenerator
+
+__all__ = ['QueryGenerator']
diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/queries/generate_query.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/queries/generate_query.py
new file mode 100644
index 00000000..32761ce8
--- /dev/null
+++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/queries/generate_query.py
@@ -0,0 +1,291 @@
+import re
+from typing import Any, Dict, Tuple, List, Optional
+
+from db_repo_module.models.knowledge_base_documents import KnowledgeBaseDocuments
+from db_repo_module.models.knowledge_base_embeddings import KnowledgeBaseEmbeddings
+from datasource.odata_parser import ODataQueryParser
+
+
+class QueryGenerator:
+    """Class to generate SQL queries for knowledge base operations."""
+
+    def __init__(self):
+        self.odata_parser = ODataQueryParser(type='sql', dynamic_var_char=':')
+
+    def build_metadata_clause(
+        self,
+        template: str,
+        filter_params: Dict[str, Any],
+        formatter,
+    ) -> str:
+        clause = template
+        for field in filter_params.keys():
+            # Replace each bare field name (whole-word match, skipping
+            # ':'-prefixed bind parameters) with the accessor expression
+            # supplied by the caller
+            pattern = rf'(?<![:\w]){re.escape(field)}(?!\w)'
+            clause = re.sub(pattern, formatter(field), clause)
+        return clause
+
+    def get_combined_search_query(
+        self,
+        query: str,
+        query_embeddings: list,
+        params: Dict[str, Any],
+        filter: str,
+        offset: Optional[int] = None,
+        limit: Optional[int] = None,
+    ) -> Tuple[str, Dict[str, Any]]:
+        """
+        Generate SQL query for combined vector and keyword search with reranking.
+
+        Args:
+            query: The search query text
+            query_embeddings: The vector embeddings of the query
+            params: Dictionary containing query parameters:
+                - threshold: Cosine similarity threshold
+                - top_k: Number of results to return
+                - vector_weight: Weight for vector similarity score
+                - keyword_weight: Weight for keyword similarity score
+                - kb_id: Knowledge base ID
+
+        Returns:
+            Tuple of (SQL query string, query parameters)
+        """
+        # Validate and sanitize parameters
+        threshold = float(params.get('threshold', 0.2))
+        # Use limit if provided, otherwise use top_k
+        effective_limit = limit if limit is not None else int(params.get('top_k', 10))
+        vector_weight = float(params.get('vector_weight', 0.7))
+        keyword_weight = float(params.get('keyword_weight', 0.3))
+        kb_id = str(params.get('kb_id'))
+        effective_offset = offset or 0
+
+        # Prepare query parameters
+        query_params = {
+            'query_embed': str(query_embeddings[0]),
+            'threshold': threshold,
+            'kb_id': kb_id,
+            'vector_weight': vector_weight,
+            'keyword_weight': keyword_weight,
+            'query': query,
+            'offset': effective_offset,
+            'limit': effective_limit,
+        }
+        metadata_filter_clause_final = ''
+        metadata_filter_clause_inner = ''
+        if filter:
+            where_clause, filter_params = self.odata_parser.prepare_odata_filter(filter)
+            if where_clause and filter_params:
+                metadata_filter_clause_final = self.build_metadata_clause(
+                    where_clause,
+                    filter_params,
+                    lambda field: (
+                        f"(COALESCE(k.metadata_value ->> '{field}', "
+                        f"v.metadata_value ->> '{field}'))"
+                    ),
+                )
+                metadata_filter_clause_inner = self.build_metadata_clause(
+                    where_clause,
+                    filter_params,
+                    lambda field: f"(d.metadata_value ->> '{field}')",
+                )
+                query_params.update(filter_params)
+        sql_query = f"""
+            WITH vector_results AS (
+                SELECT
+                    e.id as embedding_id,
+                    e.chunk_text,
+                    e.chunk_index,
+                    d.id as document_id,
+                    d.file_path,
+                    d.knowledge_base_id,
+                    d.metadata_value,
+                    1 - (e.embedding_vector <=> :query_embed ::vector) as vector_score
+                FROM
+                    {KnowledgeBaseEmbeddings.__tablename__} e
+                JOIN
+                    {KnowledgeBaseDocuments.__tablename__} d ON e.document_id = d.id
+                WHERE
+                    d.knowledge_base_id = :kb_id {'AND (' + metadata_filter_clause_inner + ')' if metadata_filter_clause_inner else ''}
+                ORDER BY
+                    vector_score DESC
+                LIMIT :limit
+            ),
+            keyword_results AS (
+                SELECT
+                    e.id as embedding_id,
+                    e.chunk_text,
+                    e.chunk_index,
+                    d.id as document_id,
+                    d.file_path,
+                    d.knowledge_base_id,
+                    d.metadata_value,
+                    ts_rank_cd(e.token, query_tokens) AS text_score
+                FROM
+                    {KnowledgeBaseEmbeddings.__tablename__} e
+                JOIN
+                    {KnowledgeBaseDocuments.__tablename__} d ON e.document_id = d.id,
+                    plainto_tsquery('english', :query) AS query_tokens
+                WHERE
+                    e.token @@ query_tokens
+                    AND d.knowledge_base_id = :kb_id {'AND (' + metadata_filter_clause_inner + ')' if metadata_filter_clause_inner else ''}
+                ORDER BY
+                    text_score DESC
+                LIMIT :limit
+            )
+            SELECT
+                COALESCE(k.embedding_id, v.embedding_id) as embedding_id,
+                COALESCE(k.chunk_text, v.chunk_text) as chunk_text,
+                COALESCE(k.chunk_index, v.chunk_index) as chunk_index,
+                COALESCE(k.document_id, v.document_id) as document_id,
+                COALESCE(k.file_path, v.file_path) as file_path,
+                COALESCE(k.metadata_value, v.metadata_value) as metadata_value,
+                COALESCE(k.knowledge_base_id, v.knowledge_base_id) as knowledge_base_id,
+                COALESCE(v.vector_score, 0) * :vector_weight +
+                COALESCE(k.text_score, 0) * :keyword_weight AS combined_score,
+                COALESCE(v.vector_score, 0) as vector_score,
+                COALESCE(k.text_score, 0) as text_score
+            FROM
+                keyword_results k
+            FULL OUTER JOIN
+                vector_results v ON k.embedding_id = v.embedding_id
+            WHERE
+                (COALESCE(v.vector_score, 0) * :vector_weight +
+                COALESCE(k.text_score, 0) * :keyword_weight) > :threshold {'AND (' + metadata_filter_clause_final + ')' if metadata_filter_clause_final else ''}
+            ORDER BY
+                combined_score DESC
+            LIMIT :limit OFFSET :offset
+        """
+
+        return sql_query, query_params
+
+    def get_image_embedding(
+        self, query_embeddings: list, params: Dict[str, Any], filter: str
+    ):
+        kb_id = str(params.get('kb_id'))
+        top_k = int(params.get('top_k', 10))
+
+        # Prepare query parameters
+        params = {
+            'query_embedding': query_embeddings,
+            'kb_id': kb_id,
+            'top_k': top_k,
+        }
+        metadata_filter_clause_final = ''
+        if filter:
+            where_clause, filter_params = self.odata_parser.prepare_odata_filter(filter)
+            if where_clause and filter_params:
+                metadata_filter_clause_final = self.build_metadata_clause(
+                    where_clause,
+                    filter_params,
+                    lambda field: f"(d.metadata_value ->> '{field}')",
+                )
+                params.update(filter_params)
+        sql_query = f"""
+            WITH ranked_embeddings AS (
+                SELECT
+                    e.id AS embedding_id,
+                    e.chunk_text,
+                    e.chunk_index,
+                    d.id AS document_id,
+                    d.file_path,
+                    d.file_name,
+                    d.knowledge_base_id,
+                    d.metadata_value,
+                    e.embedding_vector <-> :query_embedding ::vector AS distance
+                FROM
+                    {KnowledgeBaseEmbeddings.__tablename__} e
+                JOIN
+                    {KnowledgeBaseDocuments.__tablename__} d ON e.document_id = d.id
+                WHERE
+                    d.knowledge_base_id = :kb_id {'AND (' + metadata_filter_clause_final + ')' if metadata_filter_clause_final else ''}
+                ORDER BY distance ASC
+            )
+            SELECT
+                *
+            FROM
+                ranked_embeddings
+            LIMIT :top_k
+        """
+
+        return sql_query, params
+
+    def get_image_embedding_dino(
+        self,
+        query_embeddings: list,
+        params: Dict[str, Any],
+        filter: str,
+        offset: Optional[int] = None,
+        limit: Optional[int] = None,
+    ):
+        kb_id = str(params.get('kb_id'))
+        # Use limit if provided, otherwise use top_k
+        effective_limit = limit if limit is not None else int(params.get('top_k', 10))
+        reference_id_list: List[Any] = params.get('reference_id_list', [])
+        effective_offset = offset if offset is not None else 0
+
+        if reference_id_list:
+            processed_reference_ids = [
+                str(id) for id in reference_id_list
+            ]  # Use list instead of tuple
+        else:
+            processed_reference_ids = []
+
+        params = {
+            'query_embedding': query_embeddings,
+            'kb_id': kb_id,
+            'top_k': effective_limit,
+            'reference_ids': processed_reference_ids,
+            'offset': effective_offset,
+            'limit': effective_limit,
+        }
+
+        metadata_filter_clause_final = ''
+        if filter:
+            where_clause, filter_params = self.odata_parser.prepare_odata_filter(filter)
+            if where_clause and filter_params:
+                metadata_filter_clause_final = self.build_metadata_clause(
+                    where_clause,
+                    filter_params,
+                    lambda field: f"(d.metadata_value ->> '{field}')",
+                )
+                params.update(filter_params)
+        # Use ANY operator for PostgreSQL array matching
+        reference_filter = (
+            'AND e.document_id = ANY(:reference_ids)' if processed_reference_ids else ''
+        )
+
+        sql_query = f"""
+            WITH ranked_embeddings AS (
+                SELECT
+                    e.id AS embedding_id,
+                    e.chunk_text,
+                    e.chunk_index,
+                    d.id AS document_id,
+                    d.file_path,
+                    d.file_name,
+                    d.knowledge_base_id,
+                    d.metadata_value,
+                    (1 - (e.embedding_vector_1 <=> :query_embedding ::vector)) AS similarity
+                FROM {KnowledgeBaseEmbeddings.__tablename__} e
+                JOIN {KnowledgeBaseDocuments.__tablename__} d ON e.document_id = d.id
+                WHERE
+                    d.knowledge_base_id = :kb_id {reference_filter} {'AND (' + metadata_filter_clause_final + ')' if metadata_filter_clause_final else ''}
+                ORDER BY similarity DESC
+            )
+            SELECT
+                *
+            FROM
+                ranked_embeddings
+            LIMIT :limit OFFSET :offset
+        """
+
+        return sql_query, params
+
+    @staticmethod
+    def get_update_tokens_query() -> str:
+        """
+        Generate SQL query to update text search tokens.
+
+        Returns:
+            SQL query string
+        """
+        return "UPDATE knowledge_base_embeddings SET token = to_tsvector('english', chunk_text)"
diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/__init__.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/image_rag_retrieve.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/image_rag_retrieve.py
new file mode 100644
index 00000000..1c78a93c
--- /dev/null
+++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/image_rag_retrieve.py
@@ -0,0 +1,112 @@
+import httpx
+from typing import Optional
+import uuid
+from knowledge_base_module.queries.generate_query import QueryGenerator
+from db_repo_module.models.knowledge_base_embeddings import KnowledgeBaseEmbeddings
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from sqlalchemy.exc import SQLAlchemyError
+
+
+class ImageRagRetrieve:
+    def __init__(
+        self,
+        knowledge_base_embeddings_repository: SQLAlchemyRepository[
+            KnowledgeBaseEmbeddings
+        ],
+    ):
+        self.reranked_image = []
+        self.query_generator = QueryGenerator()
+        self.knowledge_base_embeddings_repository = knowledge_base_embeddings_repository
+
+    async def retrieve_images(
+        self,
+        image_data: str,
+        inference_url: str,
+        kb_id: uuid.UUID,
+        threshold: Optional[float] = None,
+        top_k: Optional[int] = None,
+        query_filter: Optional[str] = '',
+        offset: Optional[int] = None,
+        limit: Optional[int] = None,
+    ):
+        data = {'image_data': image_data}
+        internal_api_url = f'{inference_url}/inference/v1/query/embeddings'
+        async with httpx.AsyncClient(
+            timeout=httpx.Timeout(60.0, connect=30.0),
+            limits=httpx.Limits(
+                max_keepalive_connections=20,
+                max_connections=100,
+                keepalive_expiry=60,
+            ),
+        ) as client:
+            response = await client.post(internal_api_url, json=data)
+            embedding = response.json().get('data', {}).get('response', [])
+
+        if embedding:
+            self.reranked_image = await self.image_retrieve(
+                embedding[0]['clip'], kb_id, threshold, top_k, query_filter
+            )
+            reference_id_list = [
+                str(data['document_id']) for data in self.reranked_image
+            ]
+            self.reranked_image = await self.image_retrieve_dino(
+                embedding[1]['dino'],
+                kb_id,
+                reference_id_list,
+                query_filter,
+                offset,
+                limit,
+            )
+            return self.reranked_image
+        else:
+            return []
+
+    async def image_retrieve(self, embedding, kb_id, threshold, top_k, query_filter):
+        """Search for similar images in the vector database"""
+
+        # Use L2 distance for similarity search
+        params = {
+            'threshold': threshold or 0.5,
+            'top_k': top_k or 50,
+            'kb_id': kb_id,
+        }
+        try:
+            # Get and execute the combined search query
+            sql_query, query_params = self.query_generator.get_image_embedding(
+                embedding, params, query_filter
+            )
+            retrieved_docs = (
+                await self.knowledge_base_embeddings_repository.execute_query(
+                    sql_query, query_params
+                )
+            )
+            return retrieved_docs
+
+        except SQLAlchemyError as e:
+            raise RuntimeError(f'Failed to execute the query for retrieval images: {e}')
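+
+    # Two-stage image retrieval: the CLIP vector shortlists candidate
+    # documents (image_retrieve above), and those document ids are then
+    # re-scored with the DINO vector against embedding_vector_1
+    # (image_retrieve_dino below).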
+
+    async def image_retrieve_dino(
+        self, embedding, kb_id, reference_id_list, query_filter, offset=None, limit=None
+    ):
+        """Search for similar images in the vector database"""
+
+        params = {
+            'kb_id': kb_id,
+            'reference_id_list': reference_id_list,
+        }
+        try:
+            # Get and execute the combined search query
+            sql_query, query_params = self.query_generator.get_image_embedding_dino(
+                embedding, params, query_filter, offset, limit
+            )
+            retrieved_docs = (
+                await self.knowledge_base_embeddings_repository.execute_query(
+                    sql_query, query_params
+                )
+            )
+            return retrieved_docs
+
+        except SQLAlchemyError as e:
+            raise RuntimeError(f'Failed to execute the query for retrieval images: {e}')
diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/kb_rag_retrieve.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/kb_rag_retrieve.py
new file mode 100644
index 00000000..bb9692fa
--- /dev/null
+++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/kb_rag_retrieve.py
@@ -0,0 +1,182 @@
+import logging
+from typing import Optional
+import uuid
+
+from db_repo_module.models.knowledge_base_documents import KnowledgeBaseDocuments
+from db_repo_module.models.knowledge_base_embeddings import KnowledgeBaseEmbeddings
+from db_repo_module.models.llm_inference_config import LlmInferenceConfig
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from knowledge_base_module.embeddings.llm import LLMModelFunc
+from knowledge_base_module.embeddings.embed import EmbeddingFunc
+from knowledge_base_module.queries.generate_query import QueryGenerator
+from sqlalchemy import text
+from sqlalchemy.exc import SQLAlchemyError
+
+
+class KBRagResponse:
+    """Service that retrieves knowledge base documents and builds RAG responses."""
+
+    def __init__(
+        self,
+        knowledge_base_documents_repository: SQLAlchemyRepository[
+            KnowledgeBaseDocuments
+        ],
+        knowledge_base_embeddings_repository: SQLAlchemyRepository[
+            KnowledgeBaseEmbeddings
+        ],
+        embedding_url,
+    ):
+        self.embedding = EmbeddingFunc(embedding_url)
+        self.knowledge_base_documents_repository = knowledge_base_documents_repository
+        self.knowledge_base_embeddings_repository = knowledge_base_embeddings_repository
+        self.logger = logging.getLogger(__name__)
+        self.query_generator = QueryGenerator()
+        self.llm_model_func = LLMModelFunc()
+        self.reranked_docs = []
+
+    async def retrieve_documents(
+        self,
+        query: str,
+        kb_id: uuid.UUID,
+        threshold: Optional[float] = None,
+        vector_weight: Optional[float] = None,
+        keyword_weight: Optional[float] = None,
+        query_filter: Optional[str] = '',
+        offset: Optional[int] = None,
+        limit: Optional[int] = None,
+    ) -> list:
+        """
+        Retrieve documents for a specific knowledge base
+
+        Args:
+            query: Text query for search
+            kb_id: Knowledge base ID to filter results
+            threshold: Cosine similarity threshold (default: 0.2)
+            vector_weight: Weight for vector similarity score (default: 0.7)
+            keyword_weight: Weight for keyword similarity score (default: 0.3)
+            query_filter: Optional OData-style metadata filter
+            offset: Optional offset for pagination
+            limit: Optional limit for pagination
+
+        Returns:
+            List of retrieved documents
+        """
+        if not isinstance(query, str):
+            raise ValueError('Query must be in string format')
+
+        query_embeddings = self.embedding.generate_chunk_embeddings([query])
+        params = {
+            'threshold': threshold or 0.2,
+            'vector_weight': vector_weight or 0.7,
+            'keyword_weight': keyword_weight or 0.3,
+            'kb_id': kb_id,
+        }
+
+        reranked_docs = await self.combined_search_with_reranking(
+            query, query_embeddings, params, query_filter, offset, limit
+        )
+        for doc in reranked_docs:
+            for key, value in doc.items():
+                if isinstance(value, uuid.UUID):
+                    doc[key] = str(value)
+        return reranked_docs
+
+    async def combined_search_with_reranking(
+        self,
+        query: str,
+        query_embeddings: list,
+        params: dict,
+        filter: str,
+        offset: Optional[int] = None,
+        limit: Optional[int] = None,
+    ) -> list:
+        """
+        Perform combined vector and keyword search with reranking in a single SQL query,
+        filtered by knowledge base ID.
+
+        Args:
+            query: The search query text
+            query_embeddings: The vector embeddings of the query
+            params: Dictionary containing query parameters
+
+        Returns:
+            List of retrieved documents
+        """
+        try:
+            async with self.knowledge_base_embeddings_repository.session() as session:
+                # Update text search tokens
+                update_stmt = text(self.query_generator.get_update_tokens_query())
+                await session.execute(update_stmt)
+                await session.commit()
+
+                # Get and execute the combined search query
+                sql_query, query_params = (
+                    self.query_generator.get_combined_search_query(
+                        query, query_embeddings, params, filter, offset, limit
+                    )
+                )
+                retrieved_docs = (
+                    await self.knowledge_base_embeddings_repository.execute_query(
+                        sql_query, query_params
+                    )
+                )
+                return retrieved_docs
+
+        except SQLAlchemyError as e:
+            self.logger.error(f'Database error: {e}')
+            raise RuntimeError(
+                f'Failed to execute the query for retrieval documents: {e}'
+            )
+
+    async def query(
+        self,
+        query: str,
+        kb_id: uuid.UUID,
+        prompt: str,
+        threshold: Optional[float] = None,
+        vector_weight: Optional[float] = None,
+        keyword_weight: Optional[float] = None,
+        model: Optional[str] = 'gemini-2.5-pro',
+        query_filter: Optional[str] = '',
+        offset: Optional[int] = None,
+        limit: Optional[int] = None,
+        llm_config: Optional[LlmInferenceConfig] = None,
+    ):
+        """
+        RAG response for a specific knowledge base
+
+        Args:
+            query: Text query for search
+            kb_id: Knowledge base ID to filter results
+            threshold: Cosine similarity threshold (default: 0.2)
+            vector_weight: Weight for vector similarity score (default: 0.7)
+            keyword_weight: Weight for keyword similarity score (default: 0.3)
+            model: Model name (used if llm_config not provided)
+            query_filter: Optional filter query
+            offset: Optional offset for pagination
+            limit: Optional limit for pagination
+            llm_config: Optional LLM inference configuration
+
+        Returns:
+            RAG response in json or string format
+        """
+        retrieved_docs = await self.retrieve_documents(
+            query,
+            kb_id,
+            threshold,
+            vector_weight,
+            keyword_weight,
+            query_filter,
+            offset,
+            limit,
+        )
+        content = '\n--New Chunk--\n'.join(
+            [data['chunk_text'] for data in retrieved_docs]
+        )
+        sys_prompt = prompt.format(
+            content_data=content,
+        )
+
+        response = await self.llm_model_func.generate_response(
+            query, sys_prompt, model, llm_config
+        )
+        return response
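+
+# Rough shape of the hybrid scoring implemented by the generated SQL:
+#     combined = vector_weight * cosine_similarity + keyword_weight * ts_rank
+# e.g. with the defaults (0.7 / 0.3), a chunk with cosine similarity 0.8 and
+# ts_rank 0.1 scores 0.7 * 0.8 + 0.3 * 0.1 = 0.59, passing the 0.2 threshold.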
diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/kb_rag_storage.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/kb_rag_storage.py
new file mode 100644
index 00000000..135125bf
--- /dev/null
+++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/services/kb_rag_storage.py
@@ -0,0 +1,300 @@
+from datetime import datetime
+import logging
+from typing import Any, Dict, List, Tuple
+
+from knowledge_base_module.embeddings.embed import EmbeddingFunc
+import tiktoken
+
+
+class KBRagStorage:
+    """Storage-side RAG service: cleans, chunks, and embeds documents."""
+
+    def __init__(self, embedding_url):
+        self.llm_model_name = 'flora-q8'
+        self.embedding_model = 'mxbai-embed-large'
+        self.embedding_dim = 1024
+        self.max_token_size = 8500
+        self.tiktoken_model = 'gpt-4o'
+        self.chunk_size = 1200
+        self.chunk_overlap = 128
+        self.embedding = EmbeddingFunc(embedding_url)
+        self.logger = logging.getLogger(__name__)
+
+    def encode_string_by_tiktoken(self, content: str, model_name: str = 'gpt-4o'):
+        encoder = tiktoken.encoding_for_model(model_name)
+        tokens = encoder.encode(content)
+        return tokens
+
+    def decode_tokens_by_tiktoken(self, tokens: list[int], model_name: str = 'gpt-4o'):
+        decoder = tiktoken.encoding_for_model(model_name)
+        content = decoder.decode(tokens)
+        return content
+
+    def clean_text(self, content: str) -> str:
+        """
+        Clean and normalize text content from documents.
+
+        Args:
+            content: The raw text content to clean
+
+        Returns:
+            Cleaned and normalized text
+        """
+        if not content or not isinstance(content, str):
+            return ''
+
+        # Basic cleaning
+        content = content.replace('\x00', '')  # Remove null bytes
+        return content
+
+    def extract_documents(
+        self, contents: List[str]
+    ) -> Dict[str, Dict[str, Any]]:
+        """
+        Extract content from text inputs with improved error handling.
+
+        Args:
+            contents: List of text contents to process
+
+        Returns:
+            Dict mapping doc ids ('doc_0', 'doc_1', ...) to document payloads
+        """
+        if not contents:
+            self.logger.warning('No contents provided for extraction')
+            return {}
+
+        # Process contents
+        results = []
+        for content in contents:
+            processed_content = content
+            if processed_content:
+                results.append(processed_content)
+
+        # Return a dict so callers can iterate .items() safely
+        if not results:
+            return {}
+
+        # Clean and process results
+        cleaned_content = [self.clean_text(content) for content in results]
+
+        # Create document structure
+        docs = {
+            f'doc_{index}': {
+                'content': content,
+                'content_length': len(content),
+                'created_at': datetime.now().isoformat(),
+                'updated_at': datetime.now().isoformat(),
+            }
+            for index, content in enumerate(cleaned_content)
+        }
+
+        return docs
+
+    def chunk_with_langchain_recursive(
+        self,
+        content: str,
+        tiktoken_model: str,
+        chunk_size: int,
+        chunk_overlap: int,
+        separators: List[str] = ['\n\n', '\n', ' ', ''],
+    ) -> List[Dict[str, Any]]:
+        """
+        Chunk content with a LangChain-style recursive character splitter.
+
+        Args:
+            content: The text content to chunk
+            tiktoken_model: The tiktoken model to use
+            chunk_size: Approximate chunk size in characters
+            chunk_overlap: Character overlap between chunks
+            separators: List of separators for recursive splitting
+
+        Returns:
+            List of chunks with token counts and content
+        """
+        try:
+            return self.__chunk_with_custom_splitter(
+                content,
+                self.max_token_size,
+                self.chunk_overlap,
+                tiktoken_model,
+                chunk_size,
+                chunk_overlap,
+                separators,
+            )
+        except Exception as e:
+            self.logger.error(
+                f'Error in recursive character splitter: {e}'
+            )
+            return self._fallback_chunking(
+                content, self.max_token_size, self.chunk_overlap, tiktoken_model
+            )
+
+    def __chunk_with_custom_splitter(
+        self,
+        content: str,
+        max_token_size: int,
+        overlap_token_size: int,
+        tiktoken_model: str,
+        chunk_size: int,
+        chunk_overlap: int,
+        separators: List[str],
+    ) -> List[Dict[str, Any]]:
+        """Handle chunking using custom recursive text splitter."""
+        results = []
+
+        # Default separators if none provided
+        if not separators:
+            separators = ['\n\n', '\n', ' ', '']
+
+        def recursive_split(text: str, seps: List[str]) -> List[str]:
+            if not seps or len(text) <= chunk_size:
+                return [text] if text.strip() else []
+
+            sep = seps[0]
+            splits = text.split(sep) if sep else list(text)
+
+            # Keep separator with text
+            if sep:
+                splits = [splits[0]] + [sep + s for s in splits[1:] if s]
+
+            chunks = []
+            current = ''
+
+            for split in splits:
+                if len(split) > chunk_size:
+                    # Add current chunk if exists
+                    if current:
+                        chunks.append(current)
+                        # Add overlap
+                        if chunk_overlap > 0:
+                            current = (
+                                current[-chunk_overlap:]
+                                if len(current) > chunk_overlap
+                                else ''
+                            )
+                        else:
+                            current = ''
+
+                    # Recursively split large piece
+                    chunks.extend(recursive_split(split, seps[1:]))
+
+                elif len(current) + len(split) <= chunk_size:
+                    current += split
+                else:
+                    # Start new chunk
+                    if current:
+                        chunks.append(current)
+                        # Add overlap
+                        if chunk_overlap > 0 and len(current) > chunk_overlap:
+                            current = current[-chunk_overlap:] + split
+                        else:
+                            current = split
+                    else:
+                        current = split
+
+            if current:
+                chunks.append(current)
+
+            return [c for c in chunks if c.strip()]
+
+        # Split content into chunks
+        chunks = recursive_split(content, separators)
+        # Process each chunk
+        for chunk_index, chunk_text in enumerate(chunks):
+            tokens = self.encode_string_by_tiktoken(chunk_text)
+
+            if len(tokens) > max_token_size:
+                # Single underscore: self.__split_large_chunk would be
+                # name-mangled and raise AttributeError
+                results.extend(
+                    self._split_large_chunk(
+                        tokens, max_token_size, overlap_token_size, tiktoken_model
+                    )
+                )
+            else:
+                results.append(
+                    {
+                        'tokens': len(tokens),
+                        'content': chunk_text.strip(),
+                        'chunk_order_index': len(results),
+                        'chunk_index': chunk_index,
+                        'metadata': {'start_index': content.find(chunk_text)},
+                    }
+                )
+
+        return results
+
+    def _split_large_chunk(
+        self, tokens: List[int], max_tokens: int, overlap: int, model: str
+    ) -> List[Dict[str, Any]]:
+        """Split a large chunk into smaller pieces."""
+        results = []
+        for start in range(0, len(tokens), max_tokens - overlap):
+            end = min(start + max_tokens, len(tokens))
+            chunk_content = self.decode_tokens_by_tiktoken(
+                tokens[start:end],
+                model_name=model,
+            )
+            results.append(
+                {
+                    'tokens': end - start,
+                    'content': chunk_content.strip(),
+                    'chunk_order_index': len(results),
+                }
+            )
+        return results
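+
+    # Window arithmetic for _split_large_chunk: windows start every
+    # (max_tokens - overlap) tokens. With max_tokens=8500 and overlap=128, a
+    # 20,000-token chunk yields windows starting at 0, 8372 and 16744, i.e.
+    # tokens [0, 8500), [8372, 16872) and [16744, 20000).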
+
+    def _fallback_chunking(
+        self, content: str, max_tokens: int, overlap: int, model: str
+    ) -> List[Dict[str, Any]]:
+        """Fallback chunking method when the recursive splitter fails."""
+        results = []
+        tokens = self.encode_string_by_tiktoken(content)
+        for index, start in enumerate(range(0, len(tokens), max_tokens - overlap)):
+            end = min(start + max_tokens, len(tokens))
+            chunk_content = self.decode_tokens_by_tiktoken(
+                tokens[start:end], model_name=model
+            )
+            results.append(
+                {
+                    'tokens': end - start,
+                    'content': chunk_content.strip(),
+                    'chunk_order_index': index,
+                }
+            )
+        return results
+
+    def process_document(self, content: List[str]) -> List[Dict[str, Any]]:
+        """
+        Process documents and generate embeddings.
+
+        Args:
+            content: List of text contents to process
+
+        Returns:
+            List of processed documents with embeddings
+        """
+        all_docs = self.extract_documents(content)
+        processed_docs = []
+        for doc_id, doc_content in all_docs.items():
+            chunks = {
+                f'chunk_{ind}': {
+                    **data,
+                    'full_doc_id': doc_id,
+                    # doc_content is a dict, so use .get (getattr on a dict
+                    # would always return the default)
+                    'file_path': doc_content.get('file_path', 'unknown_source'),
+                }
+                for ind, data in enumerate(
+                    self.chunk_with_langchain_recursive(
+                        doc_content['content'],
+                        self.tiktoken_model,
+                        self.chunk_size,
+                        self.chunk_overlap,
+                    )
+                )
+            }
+
+            data_list, _ = self.embedding.generate_document_embeddings(
+                chunks,
+            )
+
+            processed_docs.extend(data_list)
+
+        return processed_docs
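+
+# End-to-end sketch (hypothetical URL): the chunk ids produced here
+# ('chunk_0', 'chunk_1', ...) are what /v1/store_embedding later parses
+# for chunk_index.
+#
+#     storage = KBRagStorage('http://embeddings.internal:8080')
+#     objects = storage.process_document(['long document text ...'])
+#     # -> [KnowledgeBaseEmbeddingObject(embedding_vector=[...],
+#     #                                  chunk_text='...',
+#     #                                  chunk_index='chunk_0'), ...]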
import FastAPI +from fastapi.testclient import TestClient +import pytest +from sqlalchemy.ext.asyncio import async_sessionmaker +from sqlalchemy.ext.asyncio import create_async_engine +from dependency_injector import providers +import testing.postgresql +from user_management_module.authorization.require_auth import RequireAuthMiddleware +from user_management_module.user_container import UserContainer +from knowledge_base_module.knowledge_base_container import KnowledgeBaseContainer +from knowledge_base_module.controllers.knowledge_base_controller import ( + knowledge_base_router, +) +from knowledge_base_module.controllers.knowledge_base_document_controller import ( + kb_document_router, +) +from knowledge_base_module.controllers.rag_retreival_controller import ( + rag_retrieval_router, +) +from llm_inference_config_module.container import LlmInferenceConfigContainer +from db_repo_module.models.datasource import Datasource # noqa: F401 +from db_repo_module.models.dynamic_query_yaml import DynamicQueryYaml # noqa: F401 +from unittest.mock import AsyncMock + + +class MockDbClient: + def __init__(self, engine, session_factory): + self._engine = engine + self.session = session_factory + + +@pytest.fixture +async def test_engine(): + with testing.postgresql.Postgresql() as postgresql: + database_url = postgresql.url() + + async_database_url = database_url.replace( + 'postgresql://', 'postgresql+psycopg://' + ) + + engine = create_async_engine(async_database_url) + + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + yield engine + + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + await engine.dispose() + + +@pytest.fixture +async def test_session(test_engine): + async_session = async_sessionmaker(autocommit=False, bind=test_engine) + yield async_session + + +@pytest.fixture +def test_user_id(): + """Fixture to provide a consistent test user ID.""" + return str(uuid4()) + + +@pytest.fixture +def test_session_id(): + """Fixture to provide a consistent test session ID.""" + return str(uuid4()) + + +@pytest.fixture +def setup_containers(test_engine, test_session, test_user_id, test_session_id): + # setting up the dependencies for the requireauth middleware + auth_container = AuthContainer() + common_container = CommonContainer() + user_container = UserContainer() + + db_repo_container = DatabaseModuleContainer() + mock_db_client = MockDbClient(test_engine, test_session) + db_repo_container.db_client.override(mock_db_client) + + # mocking the cache manager + cache_manager_mock = Mock() + cache_manager_mock.get_str.return_value = json.dumps( + {'user_id': test_user_id, 'session_id': test_session_id} + ) + cache_manager_mock.add = Mock() + common_container.cache_manager.override(cache_manager_mock) + + common_container.cache_manager.override(db_repo_container.cache_manager) + + # mocking the token service + mock_token_service = Mock() + mock_token_service.create_token.return_value = 'mock_token' + mock_token_service.decode_token.return_value = { + 'sub': 'test@example.com', + 'user_id': test_user_id, + 'role_id': 'test_role_id', + 'session_id': test_session_id, + } + mock_token_service.token_expiry = 3600 + mock_token_service.temporary_token_expiry = 600 + + # overriding the auth container dependencies + auth_container.token_service.override(mock_token_service) + auth_container.db_client.override(db_repo_container.db_client) + auth_container.cache_manager.override(cache_manager_mock) + + # overriding the user container dependencies 
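+    # (the user container shares the same test DB client and mocked cache manager
+    # that were wired into the auth container above)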
+ user_container.db_client.override(db_repo_container.db_client) + user_container.cache_manager.override(cache_manager_mock) + + # Initialize LLM Inference Config Container + llm_inference_config_container = LlmInferenceConfigContainer( + db_client=db_repo_container.db_client, + cache_manager=db_repo_container.cache_manager, + ) + # Override with mock cache manager to avoid Redis connection + llm_inference_config_container.cache_manager.override(cache_manager_mock) + + knowledge_base_container = KnowledgeBaseContainer( + db_client=db_repo_container.db_client, + cache_manager=db_repo_container.cache_manager, + ) + mock_cloud_storage_manager_instance = Mock() + mock_cloud_storage_manager_instance.file_protocol = Mock(return_value='gs') + + knowledge_base_container.cloud_storage_manager.override( + providers.Singleton(lambda: mock_cloud_storage_manager_instance) + ) + mock_config_service = Mock() + mock_config_service.config = { + 'cloud_config': {'cloud_provider': 'gcp'}, + 'gcp': {'gcp_asset_storage_bucket': 'test_bucket'}, + 'aws': {'aws_asset_storage_bucket': 'test_bucket'}, + 'model': {'inference_service_url': 'http://mock-inference-url.com'}, + } + knowledge_base_container.config.override( + providers.Singleton(lambda: mock_config_service.config) + ) + + auth_container.wire( + packages=[ + 'user_management_module.authorization', + ] + ) + + user_container.wire( + packages=[ + 'user_management_module.authorization', + # 'auth_module.controllers', + ] + ) + common_container.wire( + packages=[ + 'user_management_module.authorization', + 'knowledge_base_module.controllers', + ] + ) + knowledge_base_container.wire( + packages=[ + 'knowledge_base_module.controllers', + ], + ) + llm_inference_config_container.wire( + packages=[ + 'knowledge_base_module.controllers', + ], + ) + + # Mock CloudStorageManager for kb_document_router + mock_cloud_storage = Mock() + mock_cloud_storage.save_small_file = Mock() + mock_cloud_storage.save_large_file = Mock() + mock_cloud_storage.get_file = Mock(return_value=BytesIO(b'file content')) + knowledge_base_container.cloud_storage.override( + providers.Singleton(lambda: mock_cloud_storage) + ) + + # Mock MessageQueueManager for kb_document_router + mock_message_queue = Mock() + mock_message_queue.add_message = Mock(return_value='message_id_123') + knowledge_base_container.message_queue.override( + providers.Singleton(lambda: mock_message_queue) + ) + + # Mock KBRagResponse for rag_retrieval_router + mock_kb_rag_response = AsyncMock() + mock_kb_rag_response.retrieve_documents.return_value = [{'doc': 'test doc'}] + mock_kb_rag_response.query.return_value = {'response': 'test response'} + knowledge_base_container.knowledge_base_retrieve.override( + providers.Singleton(lambda: mock_kb_rag_response) + ) + + # Mock ImageRagRetrieve for rag_retrieval_router + mock_image_rag_retrieve = AsyncMock() + mock_image_rag_retrieve.retrieve_images.return_value = { + 'image_response': 'test image response' + } + knowledge_base_container.image_knowledge_base_retrieve.override( + providers.Singleton(lambda: mock_image_rag_retrieve) + ) + mock_cloud_storage_manager_instance = Mock() + mock_cloud_storage_manager_instance.file_protocol.return_value = 'gs' + + test_config_dict = { + 'model': {'inference_service_url': 'http://mock-inference-url.com'}, + 'cloud_config': {'cloud_provider': 'gcp'}, + 'gcp': { + 'gcp_asset_storage_bucket': 'test_bucket', + 'email_topic_id': 'test_topic', + }, + 'aws': { + 'aws_asset_storage_bucket': 'test_bucket', + 'queue_url': 'test_queue_url', + }, + } + 
knowledge_base_container.config.from_dict(test_config_dict) + + yield ( + auth_container, + common_container, + user_container, + knowledge_base_container, + llm_inference_config_container, + ) + auth_container.unwire() + common_container.unwire() + user_container.unwire() + + +@pytest.fixture +def test_client(setup_containers): + app = FastAPI() + app.add_middleware(RequestIdMiddleware) + app.add_middleware(RequireAuthMiddleware) + app.include_router(knowledge_base_router, prefix='/floware') + app.include_router(kb_document_router, prefix='/floware') + app.include_router(rag_retrieval_router, prefix='/floware') + return TestClient(app) + + +@pytest.fixture +def auth_token(setup_containers, test_user_id, test_session_id): + auth_container, _, _, _, _ = setup_containers + token_service = auth_container.token_service() + token = token_service.create_token( + sub='test@example.com', + user_id=test_user_id, + role_id='test_role_id', + session_id=test_session_id, + ) + return token diff --git a/wavefront/server/modules/knowledge_base_module/tests/test_knowledge_base_controller.py b/wavefront/server/modules/knowledge_base_module/tests/test_knowledge_base_controller.py new file mode 100644 index 00000000..88d260c7 --- /dev/null +++ b/wavefront/server/modules/knowledge_base_module/tests/test_knowledge_base_controller.py @@ -0,0 +1,358 @@ +from uuid import uuid4 +from db_repo_module.models.session import Session +from db_repo_module.models.user import User +import pytest +from sqlalchemy.ext.asyncio import AsyncSession +from fastapi import status + + +async def create_session(test_session: AsyncSession, test_user_id, test_session_id): + user = User( + id=test_user_id, + email='test@example.com', + password='hashed_password', + first_name='Test', + last_name='User', + ) + + # Create a session in the database + db_session = Session( + id=test_session_id, user_id=test_user_id, device_info='test_device' + ) + + async with test_session() as session: + session.add(user) + session.add(db_session) + await session.commit() + + +@pytest.mark.asyncio +async def test_create_knowledge_base( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + new_kb_payload = { + 'name': 'Test Knowledge Base', + 'description': 'This is a test knowledge base', + 'type': 'document', + 'vector_size': 1536, + } + + response = test_client.post( + '/floware/v1/knowledge-bases', + headers={'Authorization': f'Bearer {auth_token}'}, + json=new_kb_payload, + ) + assert response.status_code == status.HTTP_200_OK + response_data = response.json() + assert response_data['data']['message'] == 'Created the knowledge base successfully' + + +@pytest.mark.asyncio +async def test_create_knowledge_base_already_exists( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base first + new_kb_payload = { + 'name': 'Existing Knowledge Base', + 'description': 'This is an existing knowledge base', + 'type': 'document', + 'vector_size': 1536, + } + response = test_client.post( + '/floware/v1/knowledge-bases', + headers={'Authorization': f'Bearer {auth_token}'}, + json=new_kb_payload, + ) + assert response.status_code == status.HTTP_200_OK + + # Try to create another knowledge base with the same name + response = test_client.post( + '/floware/v1/knowledge-bases', + headers={'Authorization': f'Bearer {auth_token}'}, + 
json=new_kb_payload, + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST + response_data = response.json() + assert ( + response_data['meta']['error'] + == 'Knowledge Base with the same name already exists' + ) + + +@pytest.mark.asyncio +async def test_get_knowledge_base_by_id( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base first + new_kb_payload = { + 'name': 'Knowledge Base to Retrieve', + 'description': 'This is a knowledge base to retrieve', + 'type': 'document', + 'vector_size': 1536, + } + create_response = test_client.post( + '/floware/v1/knowledge-bases', + headers={'Authorization': f'Bearer {auth_token}'}, + json=new_kb_payload, + ) + assert create_response.status_code == status.HTTP_200_OK + created_kb_id = create_response.json()['data']['knowledge_base_id'] + + # Retrieve the knowledge base by ID + get_response = test_client.get( + f'/floware/v1/knowledge-bases/{created_kb_id}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert get_response.status_code == status.HTTP_200_OK + retrieved_kb = get_response.json() + assert retrieved_kb['id'] == created_kb_id + assert retrieved_kb['name'] == new_kb_payload['name'] + assert retrieved_kb['description'] == new_kb_payload['description'] + assert retrieved_kb['type'] == new_kb_payload['type'] + + +@pytest.mark.asyncio +async def test_get_knowledge_base_by_non_existent_id( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + non_existent_id = str(uuid4()) + get_response = test_client.get( + f'/floware/v1/knowledge-bases/{non_existent_id}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert get_response.status_code == status.HTTP_400_BAD_REQUEST + assert ( + get_response.json()['detail'] + == "Knowledge Base with the mentioned id doesn't exist" + ) + + +@pytest.mark.asyncio +async def test_get_all_knowledge_bases_default_pagination( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a few knowledge bases + for i in range(3): + new_kb_payload = { + 'name': f'Knowledge Base {i}', + 'description': f'Description {i}', + 'type': 'document', + 'vector_size': 1536, + } + create_response = test_client.post( + '/floware/v1/knowledge-bases', + headers={'Authorization': f'Bearer {auth_token}'}, + json=new_kb_payload, + ) + assert create_response.status_code == status.HTTP_200_OK + + # Retrieve all knowledge bases with default pagination + get_response = test_client.get( + '/floware/v1/knowledge-bases', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert get_response.status_code == status.HTTP_200_OK + response_data = get_response.json() + assert len(response_data['data']['resources']) == 3 + assert response_data['data']['resources'][0]['name'] == 'Knowledge Base 0' + + +@pytest.mark.asyncio +async def test_get_all_knowledge_bases_custom_pagination( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create more knowledge bases than the limit + for i in range(2): + new_kb_payload = { + 'name': f'Paginatable Knowledge Base {i}', + 'description': f'Description {i}', + 'type': 'document', + 'vector_size': 1536, + } + 
create_response = test_client.post( + '/floware/v1/knowledge-bases', + headers={'Authorization': f'Bearer {auth_token}'}, + json=new_kb_payload, + ) + assert create_response.status_code == status.HTTP_200_OK + + # Retrieve with offset and limit + get_response = test_client.get( + '/floware/v1/knowledge-bases?offset=1&limit=2', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert get_response.status_code == status.HTTP_200_OK + response_data = get_response.json() + assert len(response_data['data']['resources']) == 1 + assert ( + response_data['data']['resources'][0]['name'] == 'Paginatable Knowledge Base 1' + ) + + +@pytest.mark.asyncio +async def test_get_all_knowledge_bases_no_exist( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Retrieve all knowledge bases when none exist + get_response = test_client.get( + '/floware/v1/knowledge-bases', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert get_response.status_code == status.HTTP_200_OK + response_data = get_response.json() + assert len(response_data['data']['resources']) == 0 + + +@pytest.mark.asyncio +async def test_update_existing_knowledge_base( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base first + new_kb_payload = { + 'name': 'Knowledge Base to Update', + 'description': 'Original description', + 'type': 'document', + 'vector_size': 1536, + } + create_response = test_client.post( + '/floware/v1/knowledge-bases', + headers={'Authorization': f'Bearer {auth_token}'}, + json=new_kb_payload, + ) + assert create_response.status_code == status.HTTP_200_OK + created_kb_id = create_response.json()['data']['knowledge_base_id'] + + # Update the knowledge base + updated_kb_payload = { + 'name': 'Updated Knowledge Base Name', + 'description': 'Updated description', + 'type': 'image', + 'vector_size': 768, + } + update_response = test_client.put( + f'/floware/v1/knowledge-bases/{created_kb_id}', + headers={'Authorization': f'Bearer {auth_token}'}, + json=updated_kb_payload, + ) + + assert update_response.status_code == status.HTTP_200_OK + response_data = update_response.json() + assert response_data['data']['message'] == 'Updated the Knowledge Base successfully' + assert response_data['data']['knowledge_base_id'] == created_kb_id + + # Verify the update by retrieving the knowledge base + get_response = test_client.get( + f'/floware/v1/knowledge-bases/{created_kb_id}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert get_response.status_code == status.HTTP_200_OK + retrieved_kb = get_response.json() + assert retrieved_kb['name'] == updated_kb_payload['name'] + assert retrieved_kb['description'] == updated_kb_payload['description'] + assert retrieved_kb['type'] == updated_kb_payload['type'] + + +@pytest.mark.asyncio +async def test_update_non_existent_knowledge_base( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + non_existent_id = str(uuid4()) + updated_kb_payload = { + 'name': 'Non Existent KB', + 'description': 'Description', + 'type': 'document', + 'vector_size': 1536, + } + update_response = test_client.put( + f'/floware/v1/knowledge-bases/{non_existent_id}', + headers={'Authorization': f'Bearer {auth_token}'}, + 
json=updated_kb_payload, + ) + + assert update_response.status_code == status.HTTP_400_BAD_REQUEST + assert ( + update_response.json()['meta']['error'] + == "Knowledge Base with the given id doesn't exist" + ) + + +@pytest.mark.asyncio +async def test_delete_existing_knowledge_base( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base first + new_kb_payload = { + 'name': 'Knowledge Base to Delete', + 'description': 'Description', + 'type': 'document', + 'vector_size': 1536, + } + create_response = test_client.post( + '/floware/v1/knowledge-bases', + headers={'Authorization': f'Bearer {auth_token}'}, + json=new_kb_payload, + ) + assert create_response.status_code == status.HTTP_200_OK + created_kb_id = create_response.json()['data']['knowledge_base_id'] + + # Delete the knowledge base + delete_response = test_client.delete( + f'/floware/v1/knowledge-bases/{created_kb_id}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert delete_response.status_code == status.HTTP_204_NO_CONTENT + response_data = delete_response.json() + assert response_data['data']['message'] == 'Deleted the Knowledge Base successfully' + assert response_data['data']['knowledge_base_id'] == created_kb_id + + # Verify deletion by trying to retrieve it + get_response = test_client.get( + f'/floware/v1/knowledge-bases/{created_kb_id}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert get_response.status_code == status.HTTP_400_BAD_REQUEST + + +@pytest.mark.asyncio +async def test_delete_non_existent_knowledge_base( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + non_existent_id = str(uuid4()) + delete_response = test_client.delete( + f'/floware/v1/knowledge-bases/{non_existent_id}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert delete_response.status_code == status.HTTP_400_BAD_REQUEST + assert ( + delete_response.json()['meta']['error'] + == "Knowledge Base with the given id doesn't exist" + ) diff --git a/wavefront/server/modules/knowledge_base_module/tests/test_knowledge_base_document_controller.py b/wavefront/server/modules/knowledge_base_module/tests/test_knowledge_base_document_controller.py new file mode 100644 index 00000000..acdd5bbd --- /dev/null +++ b/wavefront/server/modules/knowledge_base_module/tests/test_knowledge_base_document_controller.py @@ -0,0 +1,402 @@ +from uuid import uuid4 +from db_repo_module.models.knowledge_bases import KnowledgeBase +from db_repo_module.models.session import Session +from db_repo_module.models.user import User +from db_repo_module.models.knowledge_base_documents import KnowledgeBaseDocuments +import pytest +from sqlalchemy.ext.asyncio import AsyncSession +from fastapi import status, UploadFile +from starlette.datastructures import Headers +from io import BytesIO + + +async def create_session(test_session: AsyncSession, test_user_id, test_session_id): + user = User( + id=test_user_id, + email='test@example.com', + password='hashed_password', + first_name='Test', + last_name='User', + ) + + # Create a session in the database + db_session = Session( + id=test_session_id, user_id=test_user_id, device_info='test_device' + ) + + async with test_session() as session: + session.add(user) + session.add(db_session) + await session.commit() + + +@pytest.mark.asyncio +async def 
test_upload_document_success( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base + kb_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB', + description='Test Description', + type='document', + vector_size=1536, + ) + session.add(new_kb) + await session.commit() + + file_content = b'This is a test document content.' + test_file = UploadFile( + filename='test_document.txt', + file=BytesIO(file_content), + headers=Headers({'content-type': 'text/plain'}), + ) + test_file.size = len(file_content) + + response = test_client.post( + f'/floware/v1/knowledge-bases/{kb_id}/documents', + headers={'Authorization': f'Bearer {auth_token}'}, + files={'file': (test_file.filename, test_file.file, test_file.content_type)}, + ) + assert response.status_code == status.HTTP_200_OK + response_data = response.json() + assert ( + response_data['data']['message'] + == 'Created the knowledge base documents and embeddings successfully' + ) + assert response_data['data']['knowledge_base_id'] == str(kb_id) + + +@pytest.mark.asyncio +async def test_upload_document_kb_not_found( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + non_existent_kb_id = uuid4() + file_content = b'This is a test document content.' + test_file = UploadFile( + filename='test_document.txt', + file=BytesIO(file_content), + headers=Headers({'content-type': 'text/plain'}), + ) + test_file.size = len(file_content) + + response = test_client.post( + f'/floware/v1/knowledge-bases/{non_existent_kb_id}/documents', + headers={'Authorization': f'Bearer {auth_token}'}, + files={'file': (test_file.filename, test_file.file, test_file.content_type)}, + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + response_data = response.json() + assert ( + response_data['meta']['error'] + == 'Knowledge Base with the given id does not exist' + ) + + +@pytest.mark.asyncio +async def test_upload_document_kb_id_not_exists( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base + kb_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB', + description='Test Description', + type='document', + vector_size=1536, + ) + session.add(new_kb) + await session.commit() + + # Upload a document for the first time + file_content = b'First document content.' + test_file_1 = UploadFile( + filename='document_1.txt', + file=BytesIO(file_content), + headers=Headers({'content-type': 'text/plain'}), + ) + test_file_1.size = len(file_content) + + response_1 = test_client.post( + f'/floware/v1/knowledge-bases/{kb_id}/documents', + headers={'Authorization': f'Bearer {auth_token}'}, + files={ + 'file': (test_file_1.filename, test_file_1.file, test_file_1.content_type) + }, + ) + assert response_1.status_code == status.HTTP_200_OK + file_content_2 = b'Second document content.' 
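+    # The second upload below targets a freshly generated kb_id (reassigned before
+    # the request) that was never persisted, so the endpoint should reject it.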
+ test_file_2 = UploadFile( + filename='document_2.txt', + file=BytesIO(file_content_2), + headers=Headers({'content-type': 'text/plain'}), + ) + test_file_2.size = len(file_content_2) + kb_id = uuid4() + response_2 = test_client.post( + f'/floware/v1/knowledge-bases/{kb_id}/documents', + headers={'Authorization': f'Bearer {auth_token}'}, + files={ + 'file': (test_file_2.filename, test_file_2.file, test_file_2.content_type) + }, + ) + + assert response_2.status_code == status.HTTP_400_BAD_REQUEST + response_data_2 = response_2.json() + assert ( + response_data_2['meta']['error'] + == 'Knowledge Base with the given id does not exist' + ) + + +@pytest.mark.asyncio +async def test_get_documents_success( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base + kb_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB for Get', + description='Test Description', + type='document', + vector_size=1536, + ) + session.add(new_kb) + await session.commit() + + # Upload a document + file_content = b'Content of doc 1.' + test_file = UploadFile( + filename='doc1.txt', + file=BytesIO(file_content), + headers=Headers({'content-type': 'text/plain'}), + ) + test_file.size = len(file_content) + response = test_client.post( + f'/floware/v1/knowledge-bases/{kb_id}/documents', + headers={'Authorization': f'Bearer {auth_token}'}, + files={'file': (test_file.filename, test_file.file, test_file.content_type)}, + ) + assert response.status_code == status.HTTP_200_OK + + # Upload another document + file_content_2 = b'Content of doc 2.' + test_file_2 = UploadFile( + filename='doc2.pdf', + file=BytesIO(file_content_2), + headers=Headers({'content-type': 'application/pdf'}), + ) + test_file_2.size = len(file_content_2) + async with test_session() as session: + new_kb_document_2 = KnowledgeBaseDocuments( + knowledge_base_id=kb_id, + file_path='gcs_url/doc2.pdf', + file_name='doc2.pdf', + file_type='pdf', + file_size=len(file_content_2), + ) + session.add(new_kb_document_2) + await session.commit() + + # Retrieve documents + get_response = test_client.get( + f'/floware/v1/knowledge-bases/{kb_id}/documents', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert get_response.status_code == status.HTTP_200_OK + response_data = get_response.json() + assert len(response_data['data']['resources']) == 2 + assert response_data['data']['resources'][0]['file_name'] == 'doc1.txt' + assert response_data['data']['resources'][1]['file_name'] == 'doc2.pdf' + + +@pytest.mark.asyncio +async def test_get_documents_filter_by_type( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base + kb_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB Filter', + description='Test Description', + type='document', + vector_size=1536, + ) + session.add(new_kb) + await session.commit() + + # Manually add documents of different types + async with test_session() as session: + doc1 = KnowledgeBaseDocuments( + knowledge_base_id=kb_id, + file_path='gcs_url/file1.txt', + file_name='file1.txt', + file_type='plain', + file_size=100, + ) + doc2 = KnowledgeBaseDocuments( + knowledge_base_id=kb_id, + file_path='gcs_url/file2.pdf', + file_name='file2.pdf', + file_type='pdf', + 
file_size=200, + ) + doc3 = KnowledgeBaseDocuments( + knowledge_base_id=kb_id, + file_path='gcs_url/file3.txt', + file_name='file3.txt', + file_type='plain', + file_size=150, + ) + session.add_all([doc1, doc2, doc3]) + await session.commit() + + # Retrieve documents filtered by type 'plain' + get_response = test_client.get( + f'/floware/v1/knowledge-bases/{kb_id}/documents?file_type=plain', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert get_response.status_code == status.HTTP_200_OK + response_data = get_response.json() + assert len(response_data['data']['resources']) == 2 + assert response_data['data']['resources'][0]['file_name'] == 'file1.txt' + assert response_data['data']['resources'][1]['file_name'] == 'file3.txt' + + +@pytest.mark.asyncio +async def test_get_documents_no_documents_found( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base but no documents + kb_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB No Docs', + description='Test Description', + type='document', + vector_size=1536, + ) + session.add(new_kb) + await session.commit() + + # Retrieve documents + get_response = test_client.get( + f'/floware/v1/knowledge-bases/{kb_id}/documents', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert get_response.status_code == status.HTTP_200_OK + response_data = get_response.json() + assert len(response_data['data']['resources']) == 0 + + +@pytest.mark.asyncio +async def test_delete_document_success( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base + kb_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB for Delete', + description='Test Description', + type='document', + vector_size=1536, + ) + session.add(new_kb) + await session.commit() + + # Manually add a document to be deleted + doc_id = uuid4() + async with test_session() as session: + new_kb_document = KnowledgeBaseDocuments( + id=doc_id, + knowledge_base_id=kb_id, + file_path='gcs_url/doc_to_delete.txt', + file_name='doc_to_delete.txt', + file_type='plain', + file_size=100, + ) + session.add(new_kb_document) + await session.commit() + + # Delete the document + delete_response = test_client.delete( + f'/floware/v1/knowledge-bases/{kb_id}/documents/{doc_id}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert delete_response.status_code == status.HTTP_204_NO_CONTENT + response_data = delete_response.json() + assert ( + response_data['data']['message'] + == 'Deleted the Knowledge Base Documents and embeddings records successfully' + ) + assert response_data['data']['knowledge_base_id'] == str(kb_id) + + # Verify deletion by trying to retrieve it + get_response = test_client.get( + f'/floware/v1/knowledge-bases/{kb_id}/documents', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert get_response.status_code == status.HTTP_200_OK + assert len(get_response.json()['data']['resources']) == 0 + + +@pytest.mark.asyncio +async def test_delete_document_not_found( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base + kb_id = uuid4() + async with test_session() 
as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB for Delete Non Existent', + description='Test Description', + type='document', + vector_size=1536, + ) + session.add(new_kb) + await session.commit() + + non_existent_doc_id = uuid4() + delete_response = test_client.delete( + f'/floware/v1/knowledge-bases/{kb_id}/documents/{non_existent_doc_id}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert delete_response.status_code == status.HTTP_400_BAD_REQUEST + response_data = delete_response.json() + assert ( + response_data['meta']['error'] == 'Document not found for this knowledge base' + ) diff --git a/wavefront/server/modules/knowledge_base_module/tests/test_rag_retrieval_controller.py b/wavefront/server/modules/knowledge_base_module/tests/test_rag_retrieval_controller.py new file mode 100644 index 00000000..8d04a59f --- /dev/null +++ b/wavefront/server/modules/knowledge_base_module/tests/test_rag_retrieval_controller.py @@ -0,0 +1,646 @@ +from unittest.mock import AsyncMock +from uuid import uuid4 +from db_repo_module.models.knowledge_bases import KnowledgeBase +from db_repo_module.models.session import Session +from db_repo_module.models.user import User +from db_repo_module.models.kb_inferences import KnowledgeBaseInferences +from db_repo_module.models.knowledge_base_documents import KnowledgeBaseDocuments +from db_repo_module.models.llm_inference_config import LlmInferenceConfig +from dependency_injector import providers +import pytest +from sqlalchemy.ext.asyncio import AsyncSession +from fastapi import status + + +async def create_session(test_session: AsyncSession, test_user_id, test_session_id): + user = User( + id=test_user_id, + email='test@example.com', + password='hashed_password', + first_name='Test', + last_name='User', + ) + + # Create a session in the database + db_session = Session( + id=test_session_id, user_id=test_user_id, device_info='test_device' + ) + + async with test_session() as session: + session.add(user) + session.add(db_session) + await session.commit() + + +@pytest.mark.asyncio +async def test_retrieve_query_success( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base + kb_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB for Retrieve', + description='Test Description', + type='document', + vector_size=1536, + ) + session.add(new_kb) + await session.commit() + + query = 'test query' + response = test_client.post( + f'/floware/v1/knowledge-base/{kb_id}/retrieve?query={query}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == status.HTTP_200_OK + response_data = response.json() + assert response_data['data']['documents'] == [{'doc': 'test doc'}] + + +@pytest.mark.asyncio +async def test_retrieve_query_empty_query( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + kb_id = uuid4() + response = test_client.post( + f'/floware/v1/knowledge-base/{kb_id}/retrieve?query=', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + response_data = response.json() + assert response_data['meta']['error'] == 'Query or Image data should not be empty' + + +@pytest.mark.asyncio +async def test_retrieve_image_success( + test_client, + 
auth_token, + test_session: AsyncSession, + test_user_id, + test_session_id, + setup_containers, +): + await create_session(test_session, test_user_id, test_session_id) + + _, _, _, kb_container, _ = setup_containers + + kb_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB Image Retrieve', + description='Test Description', + type='image', + vector_size=0, + ) + session.add(new_kb) + await session.commit() + + mock_image_rag_retrieve = AsyncMock() + mock_image_rag_retrieve.retrieve_images.return_value = [ + { + 'doc': 'image doc', + 'file_path': 'images/test.png', + } + ] + kb_container.image_knowledge_base_retrieve.override( + providers.Singleton(lambda: mock_image_rag_retrieve) + ) + + response = test_client.post( + f'/floware/v1/knowledge-base/{kb_id}/retrieve', + headers={'Authorization': f'Bearer {auth_token}'}, + json={'image_data': 'base64-image-data'}, + ) + + assert response.status_code == status.HTTP_200_OK + response_data = response.json() + documents = response_data['data']['documents'] + assert len(documents) == 1 + assert documents[0]['doc'] == 'image doc' + + mock_image_rag_retrieve.retrieve_images.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_retrieve_image_kb_not_found( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + non_existent_kb_id = uuid4() + response = test_client.post( + f'/floware/v1/knowledge-base/{non_existent_kb_id}/retrieve', + headers={'Authorization': f'Bearer {auth_token}'}, + json={'image_data': 'base64-image-data'}, + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + response_data = response.json() + assert ( + response_data['meta']['error'] + == 'Knowledge Base with the mentioned id doesnt exist' + ) + + +@pytest.mark.asyncio +async def test_retrieve_query_kb_not_found( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + non_existent_kb_id = uuid4() + query = 'test query' + response = test_client.post( + f'/floware/v1/knowledge-base/{non_existent_kb_id}/retrieve?query={query}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + response_data = response.json() + assert ( + response_data['meta']['error'] + == 'Knowledge Base with the mentioned id doesnt exist' + ) + + +@pytest.mark.asyncio +async def test_retrieve_query_no_matching_documents( + test_client, + auth_token, + test_session: AsyncSession, + test_user_id, + test_session_id, + setup_containers, +): + await create_session(test_session, test_user_id, test_session_id) + + _, _, _, kb_container, _ = setup_containers + + # Create a knowledge base + kb_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB for No Docs', + description='Test Description', + type='document', + vector_size=1536, + ) + session.add(new_kb) + await session.commit() + + # Override the mock to return empty results for retrieve_documents + mock_kb_rag_response = AsyncMock() + mock_kb_rag_response.retrieve_documents.return_value = [] + kb_container.knowledge_base_retrieve.override( + providers.Singleton(lambda: mock_kb_rag_response) + ) + + query = 'query with no matches' + response = test_client.post( + f'/floware/v1/knowledge-base/{kb_id}/retrieve?query={query}', + headers={'Authorization': f'Bearer 
{auth_token}'}, + ) + + assert response.status_code == status.HTTP_200_OK + + +@pytest.mark.asyncio +async def test_retrieve_image_data_empty( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + kb_id = uuid4() + response = test_client.post( + f'/floware/v1/knowledge-base/{kb_id}/retrieve', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + response_data = response.json() + assert response_data['meta']['error'] == 'Query or Image data should not be empty' + + +@pytest.mark.asyncio +async def test_rag_response_with_query_success( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base and an inference + kb_id = uuid4() + inference_id = uuid4() + config_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB RAG Query', + description='Test Description', + type='document', + vector_size=1536, + ) + llm_config = LlmInferenceConfig( + id=config_id, + llm_model='gemini-2.5-flash', + display_name='test_root_gemini', + api_key='test-api-key-placeholder', + type='gemini', + base_url='https://generativelanguage.googleapis.com/', + ) + new_inference = KnowledgeBaseInferences( + inference_id=inference_id, + knowledge_base_id=kb_id, + inference_content={'prompt': 'System prompt'}, + config_id=config_id, + ) + session.add(new_kb) + await session.commit() + session.add(llm_config) + await session.commit() + session.add(new_inference) + await session.commit() + + query = 'user query' + model = 'gemini-2.5-pro' + response = test_client.post( + f'/floware/v1/knowledge-base/{kb_id}/augment/{inference_id}?query={query}&model={model}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == status.HTTP_200_OK + response_data = response.json() + assert response_data['data']['response'] == {'response': 'test response'} + + +@pytest.mark.asyncio +async def test_rag_response_empty_query( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + kb_id = uuid4() + inference_id = uuid4() + config_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB RAG Query', + description='Test Description', + type='document', + vector_size=1536, + ) + llm_config = LlmInferenceConfig( + id=config_id, + llm_model='gemini-2.5-flash', + display_name='test_root_gemini', + api_key='test-api-key-placeholder', + type='gemini', + base_url='https://generativelanguage.googleapis.com/', + ) + new_inference = KnowledgeBaseInferences( + inference_id=inference_id, + knowledge_base_id=kb_id, + inference_content={'prompt': 'System prompt'}, + config_id=config_id, + ) + session.add(new_kb) + await session.commit() + session.add(llm_config) + await session.commit() + session.add(new_inference) + await session.commit() + response = test_client.post( + f'/floware/v1/knowledge-base/{kb_id}/augment/{inference_id}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + response_data = response.json() + assert ( + response_data['meta']['error'] + == 'Query must be provided either in request body or as query parameter' + ) + + 
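+# The augment endpoint resolves the knowledge base by id first, then the inference
+# row linked to it; the next two tests cover each missing-resource error in turn.
+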
+@pytest.mark.asyncio +async def test_rag_response_kb_not_found( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + non_existent_kb_id = uuid4() + inference_id = uuid4() + query = 'test query' + model = 'gemini-2.5-pro' + response = test_client.post( + f'/floware/v1/knowledge-base/{non_existent_kb_id}/augment/{inference_id}?query={query}&model={model}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + response_data = response.json() + assert ( + response_data['meta']['error'] + == 'Knowledge Base with the mentioned id doesnt exist' + ) + + +@pytest.mark.asyncio +async def test_rag_response_inference_not_found( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base + kb_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB RAG No Inference', + description='Test Description', + type='document', + vector_size=1536, + ) + session.add(new_kb) + await session.commit() + + non_existent_inference_id = uuid4() + query = 'test query' + model = 'gemini-2.5-pro' + response = test_client.post( + f'/floware/v1/knowledge-base/{kb_id}/augment/{non_existent_inference_id}?query={query}&model={model}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + response_data = response.json() + assert ( + response_data['meta']['error'] + == 'Knowledge Base inference with the mentioned knowledge_base_id and inference_id doesnt exist' + ) + + +@pytest.mark.asyncio +async def test_create_system_prompt_success( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base + kb_id = uuid4() + inference_id = uuid4() + config_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB RAG Query', + description='Test Description', + type='document', + vector_size=1536, + ) + llm_config = LlmInferenceConfig( + id=config_id, + llm_model='gemini-2.5-flash', + display_name='test_root_gemini', + api_key='test-api-key-placeholder', + type='gemini', + base_url='https://generativelanguage.googleapis.com/', + ) + new_inference = KnowledgeBaseInferences( + inference_id=inference_id, + knowledge_base_id=kb_id, + inference_content={'prompt': 'System prompt'}, + config_id=config_id, + ) + session.add(new_kb) + await session.commit() + session.add(llm_config) + await session.commit() + session.add(new_inference) + await session.commit() + + prompt_payload = {'prompt': 'This is a test system prompt.'} + response = test_client.post( + f'/floware/v1/knowledge-base/{kb_id}/llm_config/{config_id}/inference', + headers={'Authorization': f'Bearer {auth_token}'}, + json=prompt_payload, + ) + + assert response.status_code == status.HTTP_200_OK + response_data = response.json() + assert ( + response_data['data']['message'] + == 'Created the knowledge base inference table successfully' + ) + assert 'inference_id' in response_data['data'] + + +@pytest.mark.asyncio +async def test_get_system_prompt_success( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, 
test_session_id) + + # Create a knowledge base and a system prompt + kb_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB Get Prompt', + description='Test Description', + type='document', + vector_size=1536, + ) + session.add(new_kb) + await session.commit() + new_inference = KnowledgeBaseInferences( + knowledge_base_id=kb_id, + inference_content={'message': 'Existing system prompt'}, + ) + session.add(new_inference) + await session.commit() + + response = test_client.get( + f'/floware/v1/knowledge-base/{kb_id}/inference', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == status.HTTP_200_OK + response_data = response.json() + assert len(response_data['data']['resources']) == 1 + assert response_data['data']['resources'][0]['inference_content'] == { + 'message': 'Existing system prompt' + } + + +@pytest.mark.asyncio +async def test_get_system_prompt_no_prompt_found( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base but no system prompt + kb_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB Get No Prompt', + description='Test Description', + type='document', + vector_size=1536, + ) + session.add(new_kb) + await session.commit() + + response = test_client.get( + f'/floware/v1/knowledge-base/{kb_id}/inference', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == status.HTTP_200_OK + response_data = response.json() + assert len(response_data['data']['resources']) == 0 + + +@pytest.mark.asyncio +async def test_store_embeddings_success( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base + kb_id = uuid4() + doc_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB Embeddings', + description='Test Description', + type='document', + vector_size=3, + vector_size_1=0, + ) + session.add(new_kb) + await session.commit() + new_kb_document_2 = KnowledgeBaseDocuments( + id=doc_id, + knowledge_base_id=kb_id, + file_path='gcs_url/doc2.pdf', + file_name='doc2.pdf', + file_type='pdf', + file_size=1000, + ) + session.add(new_kb_document_2) + await session.commit() + + embedding_payload = { + 'embedding_vector': [[0.1, 0.2, 0.3]], + 'document_id': str(doc_id), + 'kb_id': str(kb_id), + 'chunk_text': ['chunk 1'], + 'chunk_index': ['chunk_0'], + } + + doc_wise_payload = { + 'embeddings': [ + embedding_payload + ] # <-- Wrap it in a list under the 'embeddings' key + } + + response = test_client.post( + '/floware/v1/store_embedding', + headers={'Authorization': f'Bearer {auth_token}'}, + json=doc_wise_payload, + ) + + assert response.status_code == status.HTTP_200_OK + response_data = response.json() + assert ( + response_data['data']['message'] + == 'Created the knowledge base documents and embeddings successfully' + ) + + +@pytest.mark.asyncio +async def test_store_embeddings_kb_not_found( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + non_existent_kb_id = uuid4() + doc_id = uuid4() + embedding_payload = { + 'embedding_vector': [[0.1, 0.2, 0.3]], + 'document_id': str(doc_id), + 'kb_id': 
str(non_existent_kb_id), + 'chunk_text': ['chunk 1'], + 'chunk_index': ['chunk_0'], + } + + doc_wise_payload = {'embeddings': [embedding_payload]} + + response = test_client.post( + '/floware/v1/store_embedding', + headers={'Authorization': f'Bearer {auth_token}'}, + json=doc_wise_payload, + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + response_data = response.json() + assert ( + response_data['meta']['error'] == 'There is no knowledge bases based on the id' + ) + + +@pytest.mark.asyncio +async def test_store_embeddings_vector_size_mismatch( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + await create_session(test_session, test_user_id, test_session_id) + + # Create a knowledge base with a specific vector size + kb_id = uuid4() + doc_id = uuid4() + async with test_session() as session: + new_kb = KnowledgeBase( + id=kb_id, + name='Test KB Vector Size Mismatch', + description='Test Description', + type='document', + vector_size=10, + vector_size_1=0, + ) + session.add(new_kb) + await session.commit() + + embedding_payload = { + 'embedding_vector': [[0.1, 0.2, 0.3]], # Incorrect size + 'document_id': str(doc_id), + 'kb_id': str(kb_id), + 'chunk_text': ['chunk 1'], + 'chunk_index': ['chunk_0'], + } + + doc_wise_payload = {'embeddings': [embedding_payload]} + + response = test_client.post( + '/floware/v1/store_embedding', + headers={'Authorization': f'Bearer {auth_token}'}, + json=doc_wise_payload, + ) + + assert response.status_code == status.HTTP_400_BAD_REQUEST + response_data = response.json() + assert ( + response_data['meta']['error'] + == "The vector size on the embedding doesn't match the required embedding vector size" + ) diff --git a/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/container.py b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/container.py new file mode 100644 index 00000000..81b03882 --- /dev/null +++ b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/container.py @@ -0,0 +1,37 @@ +from db_repo_module.models.llm_inference_config import LlmInferenceConfig +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from dependency_injector import containers +from dependency_injector import providers +from llm_inference_config_module.services.inference_proxy_service import ( + InferenceProxyService, +) +from llm_inference_config_module.services.llm_inference_config_service import ( + LlmInferenceConfigService, +) + + +class LlmInferenceConfigContainer(containers.DeclarativeContainer): + config = providers.Configuration(ini_files=['config.ini']) + + # External dependencies + db_client = providers.Dependency() + cache_manager = providers.Dependency() + + # Repository + llm_inference_config_repository = providers.Singleton( + SQLAlchemyRepository[LlmInferenceConfig], + model=LlmInferenceConfig, + db_client=db_client, + ) + + # Services + llm_inference_config_service = providers.Singleton( + LlmInferenceConfigService, + llm_inference_config_repository=llm_inference_config_repository, + cache_manager=cache_manager, + ) + + inference_proxy_service = providers.Singleton( + InferenceProxyService, + llm_inference_config_service=llm_inference_config_service, + ) diff --git a/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/controllers/inference_proxy_controller.py 
b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/controllers/inference_proxy_controller.py new file mode 100644 index 00000000..46674b15 --- /dev/null +++ b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/controllers/inference_proxy_controller.py @@ -0,0 +1,55 @@ +from fastapi import APIRouter, Depends, Request, status +from fastapi.responses import Response +from dependency_injector.wiring import Provide, inject +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from fastapi.responses import JSONResponse +from llm_inference_config_module.container import LlmInferenceConfigContainer +from llm_inference_config_module.services.inference_proxy_service import ( + InferenceProxyService, +) + + +inference_proxy_router = APIRouter(prefix='/v1/llm-inference') + + +@inference_proxy_router.post('/{model_id}/{model_call_path:path}') +@inject +async def proxy_inference_request( + model_id: str, + model_call_path: str, + request: Request, + inference_proxy_service: InferenceProxyService = Depends( + Provide[LlmInferenceConfigContainer.inference_proxy_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +) -> Response: + """ + Proxy inference requests to configured model endpoints. + + This endpoint accepts requests in the format: + /v1/llm-inference/{model_id}/{model_call_path} + + Where: + - model_id: The ID of the model configuration in the database + - model_call_path: The remaining path to be appended to the model's base_url + + Example: + POST /v1/llm-inference/12345/chat/completions + + Will look up model ID 12345, get its base_url (e.g., https://api.openai.com), + and forward the request to: https://api.openai.com/chat/completions + """ + try: + response = await inference_proxy_service.proxy_inference_request( + model_id=model_id, model_call_path=model_call_path, request=request + ) + return response + + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse(str(e)), + ) diff --git a/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/controllers/llm_inference_config_controller.py b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/controllers/llm_inference_config_controller.py new file mode 100644 index 00000000..7c13c889 --- /dev/null +++ b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/controllers/llm_inference_config_controller.py @@ -0,0 +1,311 @@ +import uuid + +from auth_module.auth_container import AuthContainer +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from db_repo_module.models.role import Role +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from dependency_injector.wiring import Provide, inject +from fastapi import APIRouter, Depends, Request, status +from fastapi.responses import JSONResponse +from llm_inference_config_module.container import LlmInferenceConfigContainer +from llm_inference_config_module.models.schemas import ( + CreateLlmInferenceConfigPayload, + UpdateLlmInferenceConfigPayload, + InferenceEngineType, + UNSET, +) +from llm_inference_config_module.services.llm_inference_config_service import ( + LlmInferenceConfigService, +) +from 
user_management_module.constants.auth import SERVICE_AUTH_ROLE_ID + +llm_inference_config_router = APIRouter() + + +@inject +async def check_admin( + role_id: str, + role_repository: SQLAlchemyRepository[Role] = Depends( + Provide[AuthContainer.role_repository] + ), +) -> bool: + if role_id == SERVICE_AUTH_ROLE_ID: + return True + role = await role_repository.find_one(id=role_id) + if not role: + return False + return role.name == 'admin' + + +@llm_inference_config_router.post('/v1/llm-inference-configs') +@inject +async def create_llm_inference_config( + request: Request, + payload: CreateLlmInferenceConfigPayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + llm_inference_config_service: LlmInferenceConfigService = Depends( + Provide[LlmInferenceConfigContainer.llm_inference_config_service] + ), +): + role_id = request.state.session.role_id + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Admin access required to manage LLM inference configurations' + ), + ) + + try: + config_dict = await llm_inference_config_service.create_config( + llm_model=payload.llm_model, + display_name=payload.display_name, + api_key=payload.api_key, + type=payload.type.value, + base_url=payload.base_url, + parameters=payload.parameters, + ) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'LLM inference configuration created successfully', + 'config': config_dict, + } + ), + ) + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse(str(e)), + ) + + +@llm_inference_config_router.get('/v1/llm-inference-configs') +@inject +async def get_llm_inference_configs( + request: Request, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + llm_inference_config_service: LlmInferenceConfigService = Depends( + Provide[LlmInferenceConfigContainer.llm_inference_config_service] + ), +): + role_id = request.state.session.role_id + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Admin access required to manage LLM inference configurations' + ), + ) + + try: + configs_list = await llm_inference_config_service.list_configs() + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'configs': configs_list}), + ) + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse(str(e)), + ) + + +@llm_inference_config_router.get('/v1/llm-inference-configs/{config_id}') +@inject +async def get_llm_inference_config( + request: Request, + config_id: uuid.UUID, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + llm_inference_config_service: LlmInferenceConfigService = Depends( + Provide[LlmInferenceConfigContainer.llm_inference_config_service] + ), +): + try: + config_dict = await llm_inference_config_service.get_config(config_id) + if not config_dict: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'LLM inference configuration not found: {config_id}' + ), + ) + 
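+        # Config found: return it in the standard success envelope.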
+ return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse(config_dict), + ) + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse(str(e)), + ) + + +@llm_inference_config_router.patch('/v1/llm-inference-configs/{config_id}') +@inject +async def update_llm_inference_config( + request: Request, + config_id: uuid.UUID, + payload: UpdateLlmInferenceConfigPayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + llm_inference_config_service: LlmInferenceConfigService = Depends( + Provide[LlmInferenceConfigContainer.llm_inference_config_service] + ), +): + role_id = request.state.session.role_id + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Admin access required to manage LLM inference configurations' + ), + ) + + try: + # Validate payload fields before update + update_data = {} + if payload.llm_model is not UNSET: + update_data['llm_model'] = payload.llm_model + if payload.display_name is not UNSET: + if payload.display_name is None or ( + isinstance(payload.display_name, str) + and not payload.display_name.strip() + ): + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'display_name cannot be null or empty' + ), + ) + update_data['display_name'] = payload.display_name + if payload.api_key is not UNSET: + update_data['api_key'] = payload.api_key + if payload.type is not UNSET: + if payload.type is None: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'type cannot be null or empty' + ), + ) + if hasattr(payload.type, 'value'): + # It's an enum object + update_data['type'] = payload.type.value + elif isinstance(payload.type, str) and payload.type in [ + e.value for e in InferenceEngineType + ]: + # It's a valid enum value string + update_data['type'] = payload.type + else: + # Invalid value + valid_values = [e.value for e in InferenceEngineType] + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Invalid type value. 
Must be one of: {valid_values}' + ), + ) + if payload.base_url is not UNSET: + update_data['base_url'] = payload.base_url + if payload.parameters is not UNSET: + update_data['parameters'] = payload.parameters + + if not update_data: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'No valid fields provided for update' + ), + ) + + # Update via service (handles caching) + config_dict = await llm_inference_config_service.update_config( + config_id, **update_data + ) + + if not config_dict: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'LLM inference configuration not found: {config_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'LLM inference configuration updated successfully', + 'config': config_dict, + } + ), + ) + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse(str(e)), + ) + + +@llm_inference_config_router.delete('/v1/llm-inference-configs/{config_id}') +@inject +async def delete_llm_inference_config( + request: Request, + config_id: uuid.UUID, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + llm_inference_config_service: LlmInferenceConfigService = Depends( + Provide[LlmInferenceConfigContainer.llm_inference_config_service] + ), +): + role_id = request.state.session.role_id + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Admin access required to manage LLM inference configurations' + ), + ) + + try: + deleted = await llm_inference_config_service.delete_config(config_id) + if not deleted: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'LLM inference configuration not found: {config_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'LLM inference configuration deleted successfully', + 'config_id': str(config_id), + } + ), + ) + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse(str(e)), + ) diff --git a/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/models/schemas.py b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/models/schemas.py new file mode 100644 index 00000000..aa5b24d7 --- /dev/null +++ b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/models/schemas.py @@ -0,0 +1,69 @@ +import uuid +from datetime import datetime +from enum import Enum +from typing import Any, Dict, Optional, Union + +from pydantic import BaseModel, Field + +# Sentinel value to distinguish between "not provided" and "explicitly null" +UNSET = object() + + +class InferenceEngineType(str, Enum): + GEMINI = 'gemini' + OPENAI = 'openai' + OLLAMA = 'ollama' + VLLM = 'vllm' + ANTHROPIC = 'anthropic' + AZURE_OPENAI = 'azure_openai' + GROQ = 'groq' + + +class CreateLlmInferenceConfigPayload(BaseModel): + llm_model: str = Field(..., description='The name/identifier of the LLM model') + display_name: str = Field( + ..., description='Human-readable display name for the 
configuration' + ) + api_key: Optional[str] = Field( + None, description='API key for the inference engine (optional)' + ) + type: InferenceEngineType = Field(..., description='Type of inference engine') + base_url: Optional[str] = Field( + None, description='Base URL for the inference API (optional)' + ) + parameters: Optional[Dict[str, Any]] = Field( + None, description='LLM parameters like temperature, max_tokens, etc. (optional)' + ) + + +class UpdateLlmInferenceConfigPayload(BaseModel): + llm_model: Union[str, Any] = Field( + default=UNSET, description='The name/identifier of the LLM model' + ) + display_name: Union[str, Any] = Field( + default=UNSET, description='Human-readable display name for the configuration' + ) + api_key: Union[str, None, Any] = Field( + default=UNSET, description='API key for the inference engine' + ) + type: Union[InferenceEngineType, Any] = Field( + default=UNSET, description='Type of inference engine' + ) + base_url: Union[str, None, Any] = Field( + default=UNSET, description='Base URL for the inference API' + ) + parameters: Union[Optional[Dict[str, Any]], Any] = Field( + default=UNSET, description='LLM parameters like temperature, max_tokens, etc.' + ) + + +class LlmInferenceConfigResponse(BaseModel): + id: uuid.UUID + llm_model: str + display_name: str + type: str + base_url: Optional[str] + parameters: Optional[Dict[str, Any]] + is_deleted: bool + created_at: datetime + updated_at: datetime diff --git a/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/services/inference_proxy_service.py b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/services/inference_proxy_service.py new file mode 100644 index 00000000..82454c93 --- /dev/null +++ b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/services/inference_proxy_service.py @@ -0,0 +1,401 @@ +import uuid +import re +from typing import Optional, Dict +import httpx +import json +from fastapi import Request, HTTPException, status +from fastapi.responses import StreamingResponse, Response +from common_module.log.logger import logger +from db_repo_module.models.llm_inference_config import LlmInferenceConfig +from llm_inference_config_module.models.schemas import InferenceEngineType +from llm_inference_config_module.services.llm_inference_config_service import ( + LlmInferenceConfigService, +) + + +class InferenceProxyService: + def __init__( + self, + llm_inference_config_service: LlmInferenceConfigService, + ): + self.llm_inference_config_service = llm_inference_config_service + + # Create a single reusable client + timeout = httpx.Timeout(60.0, connect=30.0) + limits = httpx.Limits( + max_keepalive_connections=20, max_connections=100, keepalive_expiry=60 + ) + self._client = httpx.AsyncClient(timeout=timeout, limits=limits) + + # Provider-specific auth header mapping + self._auth_header_mapping = { + InferenceEngineType.OPENAI: self._prepare_openai_auth, + InferenceEngineType.GEMINI: self._prepare_gemini_auth, + InferenceEngineType.ANTHROPIC: self._prepare_anthropic_auth, + InferenceEngineType.AZURE_OPENAI: self._prepare_azure_openai_auth, + InferenceEngineType.OLLAMA: self._prepare_ollama_auth, + InferenceEngineType.VLLM: self._prepare_vllm_auth, + } + + # Provider-specific model extraction mapping + self._model_extraction_mapping = { + InferenceEngineType.OPENAI: self._extract_openai_model, + InferenceEngineType.GEMINI: self._extract_gemini_model, + InferenceEngineType.ANTHROPIC: self._extract_anthropic_model, + 
InferenceEngineType.AZURE_OPENAI: self._extract_azure_openai_model,
+            InferenceEngineType.OLLAMA: self._extract_ollama_model,
+            InferenceEngineType.VLLM: self._extract_vllm_model,
+        }
+
+        # Provider-specific streaming detection mapping
+        self._streaming_detection_mapping = {
+            InferenceEngineType.OPENAI: self._detect_openai_streaming,
+            InferenceEngineType.GEMINI: self._detect_gemini_streaming,
+            InferenceEngineType.ANTHROPIC: self._detect_anthropic_streaming,
+            InferenceEngineType.AZURE_OPENAI: self._detect_azure_openai_streaming,
+            InferenceEngineType.OLLAMA: self._detect_ollama_streaming,
+            InferenceEngineType.VLLM: self._detect_vllm_streaming,
+        }
+
+    async def close(self):
+        """Close the HTTP client - call this on app shutdown"""
+        await self._client.aclose()
+
+    async def get_model_config(self, model_id: str) -> Optional[LlmInferenceConfig]:
+        """Get model configuration by ID."""
+
+        try:
+            model_uuid = uuid.UUID(model_id)
+        except ValueError:
+            return None
+        model_config = await self.llm_inference_config_service.get_config(model_uuid)
+        if not model_config:
+            return None
+
+        if isinstance(model_config, LlmInferenceConfig):
+            return model_config
+
+        return LlmInferenceConfig(**model_config)
+
+    def construct_target_url(self, base_url: str, model_call_path: str) -> str:
+        """Construct the target URL by combining base_url and model_call_path."""
+        # Remove trailing slash from base_url and leading slash from model_call_path
+        base_url = base_url.rstrip('/')
+        model_call_path = model_call_path.lstrip('/')
+
+        return f'{base_url}/{model_call_path}'
+
+    def detect_streaming(
+        self,
+        provider_type: InferenceEngineType,
+        parsed_data: dict,
+        model_call_path: str,
+    ) -> bool:
+        """Detect if this is a streaming request using provider-specific logic."""
+        streaming_detector = self._streaming_detection_mapping.get(provider_type)
+
+        if not streaming_detector:
+            logger.warning(f'No streaming detector found for provider: {provider_type}')
+            # Fall back to OpenAI-style detection for unknown providers
+            return isinstance(parsed_data, dict) and parsed_data.get('stream', False)
+
+        return streaming_detector(parsed_data, model_call_path)
+
+    def _prepare_openai_auth(self, headers: Dict[str, str], api_key: str) -> None:
+        """Prepare OpenAI authentication headers."""
+        headers['authorization'] = f'Bearer {api_key}'
+
+    def _prepare_gemini_auth(self, headers: Dict[str, str], api_key: str) -> None:
+        """Prepare Gemini authentication headers."""
+        headers['x-goog-api-key'] = api_key
+        # Use pop() rather than del: the incoming request may not carry an
+        # Authorization header, and del would raise KeyError in that case.
+        headers.pop('authorization', None)
+
+    def _prepare_anthropic_auth(self, headers: Dict[str, str], api_key: str) -> None:
+        """Prepare Anthropic authentication headers."""
+        headers['x-api-key'] = api_key
+
+    def _prepare_azure_openai_auth(self, headers: Dict[str, str], api_key: str) -> None:
+        """Prepare Azure OpenAI authentication headers."""
+        headers['api-key'] = api_key
+
+    def _prepare_ollama_auth(self, headers: Dict[str, str], api_key: str) -> None:
+        """Prepare Ollama authentication headers (typically no auth required)."""
+        # Ollama typically runs locally without authentication
+        # If auth is needed, it can be added here
+        pass
+
+    def _prepare_vllm_auth(self, headers: Dict[str, str], api_key: str) -> None:
+        """Prepare vLLM authentication headers."""
+        # vLLM auth depends on deployment configuration
+        # Default to Bearer token format
+        if api_key:
+            headers['authorization'] = f'Bearer {api_key}'
+
+    def _extract_openai_model(
+        self, parsed_data: dict, model_call_path: str
+    ) -> Optional[str]:
+        """Extract model from 
OpenAI-style request body.""" + return parsed_data.get('model') + + def _extract_gemini_model( + self, parsed_data: dict, model_call_path: str + ) -> Optional[str]: + """Extract model from Gemini URL path. Expected format: v1beta/models/{model}:generateContent""" + # Pattern: models/{model}:generateContent or models/{model}:streamGenerateContent + pattern = r'/models/([^/:]+):' + match = re.search(pattern, model_call_path) + return match.group(1) if match else None + + def _extract_anthropic_model( + self, parsed_data: dict, model_call_path: str + ) -> Optional[str]: + """Extract model from Anthropic-style request body.""" + return parsed_data.get('model') + + def _extract_azure_openai_model( + self, parsed_data: dict, model_call_path: str + ) -> Optional[str]: + """Extract model from Azure OpenAI URL path. Expected format: deployments/{model}/chat/completions""" + pattern = r'/deployments/([^/]+)/' + match = re.search(pattern, model_call_path) + return match.group(1) if match else None + + def _extract_ollama_model( + self, parsed_data: dict, model_call_path: str + ) -> Optional[str]: + """Extract model from Ollama request - typically in body like OpenAI.""" + return parsed_data.get('model') + + def _extract_vllm_model( + self, parsed_data: dict, model_call_path: str + ) -> Optional[str]: + """Extract model from vLLM request - typically in body like OpenAI.""" + return parsed_data.get('model') + + def _detect_openai_streaming(self, parsed_data: dict, model_call_path: str) -> bool: + """Detect OpenAI streaming from request body.""" + return isinstance(parsed_data, dict) and parsed_data.get('stream', False) + + def _detect_gemini_streaming(self, parsed_data: dict, model_call_path: str) -> bool: + """Detect Gemini streaming from URL path (streamGenerateContent vs generateContent).""" + return 'streamGenerateContent' in model_call_path + + def _detect_anthropic_streaming( + self, parsed_data: dict, model_call_path: str + ) -> bool: + """Detect Anthropic streaming from request body.""" + return isinstance(parsed_data, dict) and parsed_data.get('stream', False) + + def _detect_azure_openai_streaming( + self, parsed_data: dict, model_call_path: str + ) -> bool: + """Detect Azure OpenAI streaming from request body.""" + return isinstance(parsed_data, dict) and parsed_data.get('stream', False) + + def _detect_ollama_streaming(self, parsed_data: dict, model_call_path: str) -> bool: + """Detect Ollama streaming from request body.""" + return isinstance(parsed_data, dict) and parsed_data.get('stream', False) + + def _detect_vllm_streaming(self, parsed_data: dict, model_call_path: str) -> bool: + """Detect vLLM streaming from request body.""" + return isinstance(parsed_data, dict) and parsed_data.get('stream', False) + + def prepare_headers( + self, request: Request, model_config: LlmInferenceConfig + ) -> Dict[str, str]: + """Prepare headers for the forwarded request with provider-specific auth.""" + headers = {} + + # Copy most headers from the original request + excluded_headers = { + 'host', + 'content-length', + 'transfer-encoding', + 'connection', + 'upgrade', + 'proxy-authenticate', + 'proxy-authorization', + } + + for key, value in request.headers.items(): + if key.lower() not in excluded_headers: + headers[key] = value + + # Add provider-specific authentication if api_key is provided + if model_config.api_key: + provider_type = InferenceEngineType(model_config.type) + auth_method = self._auth_header_mapping.get(provider_type) + + if auth_method: + auth_method(headers, model_config.api_key) + 
else: + logger.warning(f'No auth method found for provider: {provider_type}') + + return headers + + async def forward_request( + self, + target_url: str, + headers: Dict[str, str], + body: bytes, + query_params: Dict[str, str], + is_streaming: bool = False, + ) -> Response: + """Forward the request to the target URL.""" + try: + if is_streaming: + # Build the request + req = self._client.build_request( + method='POST', + url=target_url, + headers=headers, + content=body, + params=query_params, + ) + + # Send with stream=True but don't close the response + response = await self._client.send(req, stream=True) + + async def generate(): + try: + async for chunk in response.aiter_bytes(): + yield chunk + finally: + # Ensure response is closed after streaming completes + await response.aclose() + + return StreamingResponse( + generate(), + status_code=response.status_code, + headers={ + k: v + for k, v in response.headers.items() + if k.lower() + not in ['content-length', 'transfer-encoding', 'connection'] + }, + media_type=response.headers.get('content-type', 'application/json'), + ) + else: + response = await self._client.post( + url=target_url, + headers=headers, + content=body, + params=query_params, + ) + return Response( + content=response.content, + status_code=response.status_code, + headers={ + k: v + for k, v in response.headers.items() + if k.lower() + not in [ + 'content-length', + 'transfer-encoding', + 'connection', + 'content-encoding', + ] + }, + ) + except httpx.TimeoutException as e: + logger.error(f'Timeout when forwarding to {target_url}: {str(e)}') + raise HTTPException( + status_code=status.HTTP_504_GATEWAY_TIMEOUT, + detail='Request to target model API timed out', + ) + except httpx.RequestError as e: + logger.error(f'Request error when forwarding to {target_url}: {str(e)}') + raise HTTPException( + status_code=status.HTTP_502_BAD_GATEWAY, + detail='Error communicating with target model API', + ) + + async def proxy_inference_request( + self, model_id: str, model_call_path: str, request: Request + ) -> Response: + """Main method to proxy an inference request.""" + logger.info( + f'Proxying inference request: model_id={model_id}, path={model_call_path}' + ) + + model_config = await self.get_model_config(model_id) + if not model_config: + logger.warning(f'Model not found: {model_id}') + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f'Model configuration not found for model ID: {model_id}', + ) + + if not model_config.base_url: + logger.error(f'Base URL not configured for model: {model_id}') + raise HTTPException( + status_code=status.HTTP_502_BAD_GATEWAY, + detail=f'Base URL not configured for model ID: {model_id}', + ) + + target_url = self.construct_target_url(model_config.base_url, model_call_path) + + # Read body once and parse JSON once + body = await request.body() + + try: + parsed_data = json.loads(body) if body else {} + except json.JSONDecodeError as e: + logger.error(f'Invalid JSON in request body: {str(e)}') + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Invalid JSON in request body: {str(e)}', + ) + + # Validate model compatibility using provider-specific extraction + provider_type = InferenceEngineType(model_config.type) + model_extractor = self._model_extraction_mapping.get(provider_type) + + if not model_extractor: + logger.warning(f'No model extractor found for provider: {provider_type}') + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f'Unsupported provider type: 
{provider_type}', + ) + + request_model = model_extractor(parsed_data, model_call_path) + + if not request_model: + detail_msg = ( + 'Missing "model" field in request body' + if provider_type + in [InferenceEngineType.OPENAI, InferenceEngineType.ANTHROPIC] + else f'Could not extract model from URL path: {model_call_path}' + ) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=detail_msg, + ) + + if request_model != model_config.llm_model: + logger.warning( + f'Model mismatch for {provider_type}: requested={request_model}, configured={model_config.llm_model}' + ) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Model mismatch: requested "{request_model}" but proxy is configured for "{model_config.llm_model}"', + ) + + headers = self.prepare_headers(request, model_config) + query_params = dict(request.query_params) + + # Check streaming using provider-specific detection + is_streaming = self.detect_streaming( + provider_type, parsed_data, model_call_path + ) + + response = await self.forward_request( + target_url=target_url, + headers=headers, + body=body, # Pass original body bytes for forwarding + query_params=query_params, + is_streaming=is_streaming, + ) + + logger.info(f'Proxied to {target_url}, status: {response.status_code}') + return response diff --git a/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/services/llm_inference_config_service.py b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/services/llm_inference_config_service.py new file mode 100644 index 00000000..7e7ad42c --- /dev/null +++ b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/services/llm_inference_config_service.py @@ -0,0 +1,232 @@ +import json +from typing import List, Optional +from uuid import UUID + +from common_module.log.logger import logger +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.models.llm_inference_config import LlmInferenceConfig +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from llm_inference_config_module.utils.cache_utils import ( + get_llm_inference_config_cache_key, + get_llm_inference_configs_list_cache_key, +) +from llm_inference_config_module.utils.cache_invalidation import ( + invalidate_call_processing_cache, +) + + +class LlmInferenceConfigService: + """Service for handling LLM inference configuration CRUD operations with caching""" + + def __init__( + self, + llm_inference_config_repository: SQLAlchemyRepository[LlmInferenceConfig], + cache_manager: CacheManager, + ): + """ + Initialize the LLM inference config service + + Args: + llm_inference_config_repository: Repository for LLM inference configs + cache_manager: Cache manager instance + """ + self.llm_inference_config_repository = llm_inference_config_repository + self.cache_manager = cache_manager + self.llm_inference_config_cache_time = 3600 * 24 + + async def create_config( + self, + llm_model: str, + display_name: str, + api_key: str, + type: str, + base_url: Optional[str] = None, + parameters: Optional[dict] = None, + ) -> dict: + """ + Create a new LLM inference configuration + + Args: + llm_model: LLM model name + display_name: Display name for the config + api_key: API key for the LLM provider + type: Type of inference engine + base_url: Base URL for the LLM provider (optional) + parameters: LLM parameters like temperature, max_tokens, etc. 
(optional) + + Returns: + Created LLM inference config as dict + """ + logger.info(f'Creating LLM inference config - model: {llm_model}, type: {type}') + + config = await self.llm_inference_config_repository.create( + llm_model=llm_model, + display_name=display_name, + api_key=api_key, + type=type, + base_url=base_url, + parameters=parameters, + ) + + # Convert to dict + config_dict = config.to_dict(exclude_api_key=False) + + # Cache the config + cache_key = get_llm_inference_config_cache_key(config.id) + self.cache_manager.add( + cache_key, + json.dumps(config_dict), + expiry=self.llm_inference_config_cache_time, + ) + + # Invalidate list cache + list_cache_key = get_llm_inference_configs_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache( + 'llm_inference_config', config.id, 'create' + ) + + logger.info(f'Successfully created LLM inference config with id: {config.id}') + return config_dict + + async def get_config(self, config_id: UUID) -> Optional[dict]: + """ + Get an LLM inference configuration by ID (with caching) + + Args: + config_id: UUID of the configuration + + Returns: + LLM inference config as dict or None if not found + """ + cache_key = get_llm_inference_config_cache_key(config_id) + + # Try cache first + cached_config_str = self.cache_manager.get_str(cache_key) + if cached_config_str: + logger.info(f'Cache hit for LLM inference config: {config_id}') + return json.loads(cached_config_str) + + # Cache miss - fetch from DB + logger.info(f'Cache miss - fetching LLM inference config from DB: {config_id}') + config = await self.llm_inference_config_repository.find_one( + id=config_id, is_deleted=False + ) + + if config: + # Convert to dict and cache + config_dict = config.to_dict(exclude_api_key=False) + self.cache_manager.add( + cache_key, + json.dumps(config_dict), + expiry=self.llm_inference_config_cache_time, + ) + return config_dict + + return None + + async def list_configs(self) -> List[dict]: + """ + List all LLM inference configurations (with caching) + + Returns: + List of LLM inference configs as dicts + """ + list_cache_key = get_llm_inference_configs_list_cache_key() + + # Try cache first + cached_list_str = self.cache_manager.get_str(list_cache_key) + if cached_list_str: + logger.info('Cache hit for LLM inference configs list') + return json.loads(cached_list_str) + + # Cache miss - fetch from DB + logger.info('Cache miss - fetching LLM inference configs list from DB') + configs = await self.llm_inference_config_repository.find(is_deleted=False) + + # Convert to dicts and cache + configs_dicts = [config.to_dict(exclude_api_key=False) for config in configs] + self.cache_manager.add( + list_cache_key, + json.dumps(configs_dicts), + expiry=self.llm_inference_config_cache_time, + ) + + return configs_dicts + + async def update_config(self, config_id: UUID, **update_data) -> Optional[dict]: + """ + Update an LLM inference configuration + + Args: + config_id: UUID of the configuration + **update_data: Fields to update + + Returns: + Updated config as dict or None if not found + """ + logger.info(f'Updating LLM inference config: {config_id}') + + existing_config = await self.llm_inference_config_repository.find_one( + id=config_id, is_deleted=False + ) + if not existing_config: + return None + + updated_config = await self.llm_inference_config_repository.find_one_and_update( + {'id': config_id}, refresh=True, **update_data + ) + + # Invalidate caches + cache_key = 
get_llm_inference_config_cache_key(config_id) + self.cache_manager.remove(cache_key) + + list_cache_key = get_llm_inference_configs_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache( + 'llm_inference_config', config_id, 'update' + ) + + logger.info(f'Successfully updated LLM inference config: {config_id}') + return updated_config.to_dict(exclude_api_key=False) + + async def delete_config(self, config_id: UUID) -> bool: + """ + Delete an LLM inference configuration (soft delete) + + Args: + config_id: UUID of the configuration + + Returns: + True if deleted, False if not found + """ + logger.info(f'Deleting LLM inference config: {config_id}') + + existing_config = await self.llm_inference_config_repository.find_one( + id=config_id, is_deleted=False + ) + if not existing_config: + return False + + await self.llm_inference_config_repository.find_one_and_update( + {'id': config_id}, is_deleted=True + ) + + # Invalidate caches + cache_key = get_llm_inference_config_cache_key(config_id) + self.cache_manager.remove(cache_key) + + list_cache_key = get_llm_inference_configs_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache( + 'llm_inference_config', config_id, 'delete' + ) + + logger.info(f'Successfully deleted LLM inference config: {config_id}') + return True diff --git a/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/utils/cache_invalidation.py b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/utils/cache_invalidation.py new file mode 100644 index 00000000..ba96c2d2 --- /dev/null +++ b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/utils/cache_invalidation.py @@ -0,0 +1,78 @@ +"""Utility for invalidating cache in call_processing app""" + +import os +import httpx +from uuid import UUID +from common_module.log.logger import logger + + +async def invalidate_call_processing_cache( + config_type: str, + config_id: UUID, + operation: str = 'update', +) -> bool: + """ + Invalidate cache in call_processing app + + Args: + config_type: Type of config (llm_inference_config) + config_id: UUID of the config + operation: Operation type (create, update, or delete) + + Returns: + True if successful, False otherwise (never raises exceptions) + Logs warnings on failures but doesn't break the main operation + """ + call_processing_base_url = os.getenv('CALL_PROCESSING_BASE_URL') + passthrough_secret = os.getenv('PASSTHROUGH_SECRET') + + if not call_processing_base_url or not passthrough_secret: + logger.warning( + f'Cache invalidation skipped for {config_type} {config_id}: ' + f'CALL_PROCESSING_BASE_URL or PASSTHROUGH_SECRET not configured' + ) + return False + + url = f'{call_processing_base_url.rstrip("/")}/api/cache/invalidate' + headers = { + 'Content-Type': 'application/json', + 'X-Passthrough': passthrough_secret, + } + payload = {'config_type': config_type, 'config_id': str(config_id)} + + try: + async with httpx.AsyncClient(timeout=10.0) as client: + response = await client.post(url, json=payload, headers=headers) + + if response.status_code in [200, 201]: + logger.info( + f'Successfully invalidated cache for {config_type} {config_id} ' + f'(operation: {operation})' + ) + return True + else: + logger.warning( + f'Cache invalidation failed for {config_type} {config_id}: ' + f'HTTP {response.status_code} - {response.text}' + 
) + return False + + except httpx.TimeoutException as e: + logger.warning( + f'Cache invalidation timeout for {config_type} {config_id}: {e}. ' + f'Continuing with main operation.' + ) + return False + except httpx.RequestError as e: + logger.warning( + f'Cache invalidation request error for {config_type} {config_id}: {e}. ' + f'Continuing with main operation.' + ) + return False + except Exception as e: + logger.warning( + f'Unexpected error during cache invalidation for {config_type} {config_id}: {e}. ' + f'Continuing with main operation.', + exc_info=True, + ) + return False diff --git a/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/utils/cache_utils.py b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/utils/cache_utils.py new file mode 100644 index 00000000..d6020b2c --- /dev/null +++ b/wavefront/server/modules/llm_inference_config_module/llm_inference_config_module/utils/cache_utils.py @@ -0,0 +1,13 @@ +"""Cache key generation utilities for LLM inference configurations""" + +from uuid import UUID + + +def get_llm_inference_config_cache_key(config_id: UUID) -> str: + """Generate cache key for an LLM inference config""" + return f'llm_inference_config:{config_id}' + + +def get_llm_inference_configs_list_cache_key() -> str: + """Generate cache key for LLM inference configs list""" + return 'llm_inference_configs:list' diff --git a/wavefront/server/modules/llm_inference_config_module/pyproject.toml b/wavefront/server/modules/llm_inference_config_module/pyproject.toml new file mode 100644 index 00000000..7a9c1dac --- /dev/null +++ b/wavefront/server/modules/llm_inference_config_module/pyproject.toml @@ -0,0 +1,42 @@ +[project] +name = "llm-inference-config-module" +version = "0.1.0" +description = "LLM Inference Configuration module for dynamic model and inference engine configuration" +authors = [ + { name = "rootflo engineering", email = "engineering@rootflo.ai" } +] +requires-python = ">=3.11" + +dependencies = [ + "common-module", + "db-repo-module", + "fastapi>=0.100.0", + "pydantic>=2.0.0", + "dependency-injector>=4.40.0", + "httpx>=0.25.0", +] + +[tool.uv.sources] +common-module = { workspace = true } +db-repo-module = { workspace = true } + +[dependency-groups] +dev = [ + "pytest>=8.3.3,<9.0.0", + "pytest-asyncio>=0.24.0,<1.0.0", + "asyncpg>=0.30.0,<1.0.0", + "testing-postgresql>=1.3.0,<2.0.0" +] + +[tool.pytest.ini_options] +asyncio_mode = "auto" + +[tool.uv] +package = true + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["llm_inference_config_module"] diff --git a/wavefront/server/modules/llm_inference_config_module/tests/test_inference_proxy.py b/wavefront/server/modules/llm_inference_config_module/tests/test_inference_proxy.py new file mode 100644 index 00000000..fc471bbe --- /dev/null +++ b/wavefront/server/modules/llm_inference_config_module/tests/test_inference_proxy.py @@ -0,0 +1,255 @@ +import pytest +import uuid +from unittest.mock import AsyncMock, Mock + +# from fastapi import Request +# from fastapi.testclient import TestClient +# from httpx import Response as HttpxResponse +from llm_inference_config_module.services.inference_proxy_service import ( + InferenceProxyService, +) +from llm_inference_config_module.models.schemas import InferenceEngineType +from db_repo_module.models.llm_inference_config import LlmInferenceConfig + + +@pytest.fixture +def mock_llm_inference_config_service(): + return AsyncMock() + + 
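+# The fixtures below build the proxy service against the mocked config
+# service, so the tests exercise URL construction, header preparation,
+# model extraction, and streaming detection without real network calls.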
+@pytest.fixture +def inference_proxy_service(mock_llm_inference_config_service): + return InferenceProxyService( + llm_inference_config_service=mock_llm_inference_config_service + ) + + +@pytest.fixture +def sample_model_config(): + return LlmInferenceConfig( + id=uuid.uuid4(), + llm_model='gpt-4', + display_name='GPT-4', + api_key='test-api-key', + type='openai', + base_url='https://api.openai.com', + is_deleted=False, + ) + + +@pytest.mark.asyncio +async def test_get_model_config_success( + inference_proxy_service, mock_llm_inference_config_service, sample_model_config +): + """Test successful model config retrieval.""" + model_id = str(sample_model_config.id) + config_dict = sample_model_config.to_dict(exclude_api_key=False) + mock_llm_inference_config_service.get_config.return_value = config_dict + + result = await inference_proxy_service.get_model_config(model_id) + + assert str(result.id) == str(sample_model_config.id) + assert result.llm_model == sample_model_config.llm_model + assert result.api_key == sample_model_config.api_key + mock_llm_inference_config_service.get_config.assert_called_once_with( + sample_model_config.id + ) + + +@pytest.mark.asyncio +async def test_get_model_config_not_found( + inference_proxy_service, mock_llm_inference_config_service +): + """Test model config not found.""" + model_id = str(uuid.uuid4()) + mock_llm_inference_config_service.get_config.return_value = None + + result = await inference_proxy_service.get_model_config(model_id) + + assert result is None + + +@pytest.mark.asyncio +async def test_get_model_config_invalid_uuid( + inference_proxy_service, mock_llm_inference_config_service +): + """Test invalid UUID format.""" + model_id = 'invalid-uuid' + + result = await inference_proxy_service.get_model_config(model_id) + + assert result is None + mock_llm_inference_config_service.get_config.assert_not_called() + + +def test_construct_target_url(inference_proxy_service): + """Test URL construction.""" + base_url = 'https://api.openai.com/' + model_call_path = '/chat/completions' + + result = inference_proxy_service.construct_target_url(base_url, model_call_path) + + assert result == 'https://api.openai.com/chat/completions' + + +def test_construct_target_url_no_slashes(inference_proxy_service): + """Test URL construction without slashes.""" + base_url = 'https://api.openai.com' + model_call_path = 'chat/completions' + + result = inference_proxy_service.construct_target_url(base_url, model_call_path) + + assert result == 'https://api.openai.com/chat/completions' + + +def test_prepare_headers_openai(inference_proxy_service): + """Test header preparation for OpenAI.""" + # Mock request + request = Mock() + request.headers = { + 'Content-Type': 'application/json', + 'User-Agent': 'test-agent', + 'Host': 'should-be-excluded', + 'Content-Length': 'should-be-excluded', + } + + # Mock model config for OpenAI + model_config = Mock() + model_config.api_key = 'test-api-key' + model_config.type = 'openai' + + result = inference_proxy_service.prepare_headers(request, model_config) + + assert result['Content-Type'] == 'application/json' + assert result['User-Agent'] == 'test-agent' + assert result['authorization'] == 'Bearer test-api-key' + assert 'Host' not in result + assert 'Content-Length' not in result + + +def test_prepare_headers_gemini(inference_proxy_service): + """Test header preparation for Gemini.""" + # Mock request + request = Mock() + request.headers = { + 'Content-Type': 'application/json', + 'User-Agent': 'test-agent', + 'authorization': 'Bearer 
should-be-removed', + } + + # Mock model config for Gemini + model_config = Mock() + model_config.api_key = 'test-gemini-key' + model_config.type = 'gemini' + + result = inference_proxy_service.prepare_headers(request, model_config) + + assert result['Content-Type'] == 'application/json' + assert result['User-Agent'] == 'test-agent' + assert result['x-goog-api-key'] == 'test-gemini-key' + assert 'authorization' not in result # Should be removed + + +def test_detect_streaming_openai_true(inference_proxy_service): + """Test OpenAI streaming detection with stream=true.""" + parsed_data = {'stream': True, 'model': 'gpt-4'} + model_call_path = '/chat/completions' + + result = inference_proxy_service.detect_streaming( + InferenceEngineType.OPENAI, parsed_data, model_call_path + ) + assert result is True + + +def test_detect_streaming_openai_false(inference_proxy_service): + """Test OpenAI streaming detection with stream=false.""" + parsed_data = {'stream': False, 'model': 'gpt-4'} + model_call_path = '/chat/completions' + + result = inference_proxy_service.detect_streaming( + InferenceEngineType.OPENAI, parsed_data, model_call_path + ) + assert result is False + + +def test_detect_streaming_openai_no_stream_key(inference_proxy_service): + """Test OpenAI streaming detection when stream key is missing.""" + parsed_data = {'model': 'gpt-4'} + model_call_path = '/chat/completions' + + result = inference_proxy_service.detect_streaming( + InferenceEngineType.OPENAI, parsed_data, model_call_path + ) + assert result is False + + +def test_detect_streaming_gemini_streaming(inference_proxy_service): + """Test Gemini streaming detection from URL path.""" + parsed_data = {} + model_call_path = '/v1beta/models/gemini-2.5-flash:streamGenerateContent' + + result = inference_proxy_service.detect_streaming( + InferenceEngineType.GEMINI, parsed_data, model_call_path + ) + assert result is True + + +def test_detect_streaming_gemini_non_streaming(inference_proxy_service): + """Test Gemini non-streaming detection from URL path.""" + parsed_data = {} + model_call_path = '/v1beta/models/gemini-2.5-flash:generateContent' + + result = inference_proxy_service.detect_streaming( + InferenceEngineType.GEMINI, parsed_data, model_call_path + ) + assert result is False + + +def test_detect_streaming_fallback_unknown_provider(inference_proxy_service): + """Test fallback behavior for unknown provider.""" + parsed_data = {'stream': True, 'model': 'test-model'} + model_call_path = '/some/path' + + # Using a provider type that's not in the mapping (simulate unknown provider) + result = inference_proxy_service.detect_streaming( + 'unknown_provider', parsed_data, model_call_path + ) + assert result is True + + +def test_extract_openai_model(inference_proxy_service): + """Test OpenAI model extraction from request body.""" + parsed_data = {'model': 'gpt-4', 'stream': False} + model_call_path = '/chat/completions' + + result = inference_proxy_service._extract_openai_model(parsed_data, model_call_path) + assert result == 'gpt-4' + + +def test_extract_gemini_model(inference_proxy_service): + """Test Gemini model extraction from URL path.""" + parsed_data = {} + model_call_path = '/v1beta/models/gemini-2.5-flash:generateContent' + + result = inference_proxy_service._extract_gemini_model(parsed_data, model_call_path) + assert result == 'gemini-2.5-flash' + + +def test_extract_gemini_model_streaming(inference_proxy_service): + """Test Gemini model extraction from streaming URL path.""" + parsed_data = {} + model_call_path = 
'/v1beta/models/gemini-pro:streamGenerateContent' + + result = inference_proxy_service._extract_gemini_model(parsed_data, model_call_path) + assert result == 'gemini-pro' + + +def test_extract_azure_openai_model(inference_proxy_service): + """Test Azure OpenAI model extraction from URL path.""" + parsed_data = {} + model_call_path = '/openai/deployments/gpt-4/chat/completions' + + result = inference_proxy_service._extract_azure_openai_model( + parsed_data, model_call_path + ) + assert result == 'gpt-4' diff --git a/wavefront/server/modules/plugins_module/README.md b/wavefront/server/modules/plugins_module/README.md new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/modules/plugins_module/plugins_module/controllers/__init__.py b/wavefront/server/modules/plugins_module/plugins_module/controllers/__init__.py new file mode 100644 index 00000000..dcaa2bad --- /dev/null +++ b/wavefront/server/modules/plugins_module/plugins_module/controllers/__init__.py @@ -0,0 +1,4 @@ +from .datasource_controller import datasource_router +from .authenticator_controller import authenticator_router + +__all__ = ['datasource_router', 'authenticator_router'] diff --git a/wavefront/server/modules/plugins_module/plugins_module/controllers/authenticator_controller.py b/wavefront/server/modules/plugins_module/plugins_module/controllers/authenticator_controller.py new file mode 100644 index 00000000..98f23071 --- /dev/null +++ b/wavefront/server/modules/plugins_module/plugins_module/controllers/authenticator_controller.py @@ -0,0 +1,372 @@ +from dependency_injector.wiring import inject, Provide +from fastapi import Depends, Request, status, APIRouter +from fastapi.responses import JSONResponse +from pydantic import BaseModel +from typing import Dict, Any, Optional +from uuid import UUID + +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from db_repo_module.models.authenticator import Authenticator +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from plugins_module.plugins_container import PluginsContainer +from plugins_module.services.authenticator_services import ( + get_authenticator_config, + create_authenticator_config, + update_authenticator_config, + delete_authenticator_config, + get_all_authenticators, + enable_authenticator, + disable_authenticator, +) +from plugins_module.services.datasource_services import check_admin + + +authenticator_router = APIRouter() + + +class CreateAuthenticatorPayload(BaseModel): + auth_name: str + auth_type: str + auth_desc: Optional[str] = None + config: Dict[str, Any] + + +class UpdateAuthenticatorPayload(BaseModel): + auth_desc: Optional[str] = None + config: Dict[str, Any] + + +@authenticator_router.post('/v1/authenticators') +@inject +async def create_authenticator( + request: Request, + payload: CreateAuthenticatorPayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + authenticator_repository: SQLAlchemyRepository[Authenticator] = Depends( + Provide[PluginsContainer.authenticator_repository] + ), +): + """Create a new authenticator configuration.""" + role_id = request.state.session.role_id + + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse('Admin access required'), + ) + + try: + authenticator = await create_authenticator_config( + auth_name=payload.auth_name, + 
auth_type=payload.auth_type, + auth_desc=payload.auth_desc, + config=payload.config, + authenticator_repository=authenticator_repository, + ) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Authenticator created successfully', + 'authenticator': authenticator, + } + ), + ) + except ValueError as e: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse(str(e)), + ) + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Failed to create authenticator: {str(e)}' + ), + ) + + +@authenticator_router.get('/v1/authenticators') +@inject +async def get_all_authenticators_endpoint( + request: Request, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + authenticator_repository: SQLAlchemyRepository[Authenticator] = Depends( + Provide[PluginsContainer.authenticator_repository] + ), +): + """Get all authenticator configurations.""" + role_id = request.state.session.role_id + + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse('Admin access required'), + ) + + try: + authenticators = await get_all_authenticators(authenticator_repository) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'authenticators': authenticators} + ), + ) + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Failed to get authenticators: {str(e)}' + ), + ) + + +@authenticator_router.get('/v1/authenticators/{auth_id}') +@inject +async def get_authenticator( + request: Request, + auth_id: str, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + authenticator_repository: SQLAlchemyRepository[Authenticator] = Depends( + Provide[PluginsContainer.authenticator_repository] + ), +): + """Get authenticator configuration by ID.""" + role_id = request.state.session.role_id + + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse('Admin access required'), + ) + + try: + auth_uuid = UUID(auth_id) + authenticator = await get_authenticator_config( + auth_uuid, authenticator_repository + ) + + if not authenticator: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Authenticator not found: {auth_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse(authenticator), + ) + except ValueError: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Invalid authenticator ID format' + ), + ) + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Failed to get authenticator: {str(e)}' + ), + ) + + +@authenticator_router.put('/v1/authenticators/{auth_id}') +@inject +async def update_authenticator( + request: Request, + auth_id: str, + payload: UpdateAuthenticatorPayload, + response_formatter: ResponseFormatter = Depends( + 
Provide[CommonContainer.response_formatter] + ), + authenticator_repository: SQLAlchemyRepository[Authenticator] = Depends( + Provide[PluginsContainer.authenticator_repository] + ), +): + """Update authenticator configuration.""" + role_id = request.state.session.role_id + + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse('Admin access required'), + ) + + try: + auth_uuid = UUID(auth_id) + authenticator = await update_authenticator_config( + auth_id=auth_uuid, + config=payload.config, + auth_desc=payload.auth_desc, + authenticator_repository=authenticator_repository, + ) + + if not authenticator: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Authenticator not found: {auth_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Authenticator updated successfully', + 'authenticator': authenticator, + } + ), + ) + except ValueError as e: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse(str(e)), + ) + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Failed to update authenticator: {str(e)}' + ), + ) + + +@authenticator_router.delete('/v1/authenticators/{auth_id}') +@inject +async def delete_authenticator( + request: Request, + auth_id: str, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + authenticator_repository: SQLAlchemyRepository[Authenticator] = Depends( + Provide[PluginsContainer.authenticator_repository] + ), +): + """Delete authenticator configuration.""" + role_id = request.state.session.role_id + + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse('Admin access required'), + ) + + try: + auth_uuid = UUID(auth_id) + await delete_authenticator_config(auth_uuid, authenticator_repository) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'message': 'Authenticator deleted successfully'} + ), + ) + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Failed to delete authenticator: {str(e)}' + ), + ) + + +@authenticator_router.post('/v1/authenticators/{auth_id}/enable') +@inject +async def enable_authenticator_endpoint( + request: Request, + auth_id: str, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + authenticator_repository: SQLAlchemyRepository[Authenticator] = Depends( + Provide[PluginsContainer.authenticator_repository] + ), +): + """Enable an authenticator.""" + role_id = request.state.session.role_id + + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse('Admin access required'), + ) + + try: + auth_uuid = UUID(auth_id) + await enable_authenticator(auth_uuid, authenticator_repository) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'message': f'Authenticator {auth_id} enabled 
successfully'} + ), + ) + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Failed to enable authenticator: {str(e)}' + ), + ) + + +@authenticator_router.post('/v1/authenticators/{auth_id}/disable') +@inject +async def disable_authenticator_endpoint( + request: Request, + auth_id: str, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + authenticator_repository: SQLAlchemyRepository[Authenticator] = Depends( + Provide[PluginsContainer.authenticator_repository] + ), +): + """Disable an authenticator.""" + role_id = request.state.session.role_id + + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse('Admin access required'), + ) + + try: + auth_uuid = UUID(auth_id) + await disable_authenticator(auth_uuid, authenticator_repository) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'message': f'Authenticator {auth_id} disabled successfully'} + ), + ) + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Failed to disable authenticator: {str(e)}' + ), + ) diff --git a/wavefront/server/modules/plugins_module/plugins_module/controllers/datasource_controller.py b/wavefront/server/modules/plugins_module/plugins_module/controllers/datasource_controller.py new file mode 100644 index 00000000..c0fc1a2c --- /dev/null +++ b/wavefront/server/modules/plugins_module/plugins_module/controllers/datasource_controller.py @@ -0,0 +1,744 @@ +from datasource.bigquery.config import BigQueryConfig +from datasource.redshift.config import RedshiftConfig +from dependency_injector.wiring import inject +import json +from dependency_injector.wiring import Provide +from fastapi import Depends +from fastapi import Query +from fastapi import Request +from fastapi import status +from fastapi.responses import JSONResponse +from fastapi.routing import APIRouter + +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from common_module.utils.serializer import serialize_values +from db_repo_module.models.resource import ResourceScope +from db_repo_module.models.datasource import Datasource +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from datasource import DatasourcePlugin +from datasource.types import DataSourceType, QueryResult, TableListResult +from plugins_module.services.datasource_services import ( + check_admin, + check_is_valid_resource, + fetch_data_filters, + get_datasource_config, + validate_datasource_payload, +) +from plugins_module.utils.helper import ( + AddDatasourcePayload, + UpdateDatasourcePayload, + InsertRowsJsonPayload, +) +from plugins_module.plugins_container import PluginsContainer +from user_management_module.user_container import UserContainer +from user_management_module.services.user_service import UserService +from fastapi import HTTPException +from user_management_module.utils.user_utils import get_current_user +from plugins_module.services.dynamic_query_service import DynamicQueryService +from db_repo_module.cache.cache_manager import CacheManager +from ..utils.helper import generate_cache_key, validate_yaml_query +import yaml +from ..utils.helper import 
DynamicQueryRequest +from ..utils.helper import DynamicQueryExecuteRequest +from datetime import datetime + + +datasource_router = APIRouter() + + +@datasource_router.post('/v1/datasources') +@inject +async def add_datasource( + request: Request, + add_datasource_payload: AddDatasourcePayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + datasource_repository: SQLAlchemyRepository[Datasource] = Depends( + Provide[PluginsContainer.datasource_repository] + ), +): + role_id = request.state.session.role_id + + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Data access not set for non-admin user' + ), + ) + + if not validate_datasource_payload(add_datasource_payload): + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse('Invalid datasource payload'), + ) + + config_json = json.loads(add_datasource_payload.config) + + if add_datasource_payload.type == DataSourceType.GCP_BIGQUERY: + config = BigQueryConfig(**config_json) + elif add_datasource_payload.type == DataSourceType.AWS_REDSHIFT: + config = RedshiftConfig(**config_json) + else: + raise ValueError(f'Invalid datasource type: {add_datasource_payload.type}') + + datasource_plugin = DatasourcePlugin(add_datasource_payload.type, config) + + connection_result = await datasource_plugin.test_connection() + + if not connection_result: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + 'Data source connection failed.' + ), + ) + + datasource: Datasource = await datasource_repository.create( + name=add_datasource_payload.name, + type=add_datasource_payload.type, + config=config_json, + description=add_datasource_payload.description, + ) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Datasource created successfully', + 'datasource_id': str(datasource.id), + } + ), + ) + + +@datasource_router.patch('/v1/datasources/{datasource_id}') +@inject +async def update_datasource( + request: Request, + datasource_id: str, + update_datasource_payload: UpdateDatasourcePayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + datasource_repository: SQLAlchemyRepository[Datasource] = Depends( + Provide[PluginsContainer.datasource_repository] + ), +): + role_id = request.state.session.role_id + + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Data access not set for non-admin user' + ), + ) + + # Check if datasource exists + existing_datasource = await datasource_repository.find_one(id=datasource_id) + if not existing_datasource: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Datasource not found: {datasource_id}' + ), + ) + + # Prepare update data + update_data = {} + + if update_datasource_payload.name is not None: + update_data['name'] = update_datasource_payload.name + + if update_datasource_payload.description is not None: + update_data['description'] = update_datasource_payload.description + + # Handle type and config updates (they go together) + if ( + update_datasource_payload.type is not None + or 
update_datasource_payload.config is not None + ): + # Use provided type or keep existing type + datasource_type = update_datasource_payload.type or existing_datasource.type + + if update_datasource_payload.config is not None: + payload_config = json.loads(update_datasource_payload.config) + + if datasource_type == DataSourceType.GCP_BIGQUERY: + config = BigQueryConfig(**payload_config) + elif datasource_type == DataSourceType.AWS_REDSHIFT: + config = RedshiftConfig(**payload_config) + else: + raise ValueError(f'Invalid datasource type: {datasource_type}') + + # Test connection with new config + datasource_plugin = DatasourcePlugin(datasource_type, config) + connection_result = await datasource_plugin.test_connection() + + if not connection_result: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + 'Data source connection failed.' + ), + ) + + update_data['config'] = payload_config + + if update_datasource_payload.type is not None: + update_data['type'] = datasource_type + + if not update_data: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'No valid fields provided for update' + ), + ) + + # Update datasource + updated_datasource = await datasource_repository.find_one_and_update( + filters={'id': datasource_id}, refresh=True, **update_data + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Datasource updated successfully', + 'datasource_id': str(updated_datasource.id), + 'datasource': Datasource.to_dict(updated_datasource), + } + ), + ) + + +@datasource_router.delete('/v1/datasources/{datasource_id}') +@inject +async def delete_datasource( + request: Request, + datasource_id: str, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + datasource_repository: SQLAlchemyRepository[Datasource] = Depends( + Provide[PluginsContainer.datasource_repository] + ), +): + role_id = request.state.session.role_id + is_admin = await check_admin(role_id) + + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Data access not set for non-admin user' + ), + ) + + # Check if datasource exists + existing_datasource = await datasource_repository.find_one(id=datasource_id) + if not existing_datasource: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Datasource not found: {datasource_id}' + ), + ) + + # Delete datasource + await datasource_repository.delete_all(id=datasource_id) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Datasource deleted successfully', + 'datasource_id': str(datasource_id), + } + ), + ) + + +@datasource_router.get('/v1/datasources') +@inject +async def get_datasources( + request: Request, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + datasource_repository: SQLAlchemyRepository[Datasource] = Depends( + Provide[PluginsContainer.datasource_repository] + ), +): + role_id = request.state.session.role_id + is_admin = await check_admin(role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Data access not set for non-admin user' + ), + ) + datasources = 
await datasource_repository.find()
+    datasources = [Datasource.to_dict(datasource) for datasource in datasources]
+    return JSONResponse(
+        content=response_formatter.buildSuccessResponse({'datasources': datasources}),
+        status_code=status.HTTP_200_OK,
+    )
+
+
+@datasource_router.get('/v1/datasources/{datasource_id}')
+@inject
+async def get_datasource(
+    request: Request,
+    datasource_id: str,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    datasource_repository: SQLAlchemyRepository[Datasource] = Depends(
+        Provide[PluginsContainer.datasource_repository]
+    ),
+):
+    role_id = request.state.session.role_id
+    is_admin = await check_admin(role_id)
+    if not is_admin:
+        return JSONResponse(
+            status_code=status.HTTP_403_FORBIDDEN,
+            content=response_formatter.buildErrorResponse(
+                'Data access not set for non-admin user'
+            ),
+        )
+    datasource = await datasource_repository.find_one(id=datasource_id)
+
+    if not datasource:
+        return JSONResponse(
+            content=response_formatter.buildErrorResponse(
+                f'Datasource not found: {datasource_id}'
+            ),
+            status_code=status.HTTP_404_NOT_FOUND,
+        )
+    return JSONResponse(
+        content=response_formatter.buildSuccessResponse(Datasource.to_dict(datasource)),
+        status_code=status.HTTP_200_OK,
+    )
+
+
+@datasource_router.post('/v1/datasources/{datasource_id}/test-connection')
+@inject
+async def test_datasource_connection(
+    request: Request,
+    datasource_id: str,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+):
+    role_id = request.state.session.role_id
+    is_admin = await check_admin(role_id)
+    if not is_admin:
+        return JSONResponse(
+            status_code=status.HTTP_403_FORBIDDEN,
+            content=response_formatter.buildErrorResponse(
+                'Data access not set for non-admin user'
+            ),
+        )
+    datasource_type, datasource_config = await get_datasource_config(datasource_id)
+    if not datasource_config:
+        return JSONResponse(
+            status_code=status.HTTP_404_NOT_FOUND,
+            content=response_formatter.buildErrorResponse(
+                f'Datasource not found: {datasource_id}'
+            ),
+        )
+    datasource_plugin = DatasourcePlugin(datasource_type, datasource_config)
+    connection_result = await datasource_plugin.test_connection()
+    return JSONResponse(
+        content=connection_result.result,
+        status_code=status.HTTP_200_OK,
+    )
+
+
+@datasource_router.get('/v1/datasources/{datasource_id}/resources')
+@inject
+async def get_tables(
+    request: Request,
+    datasource_id: str,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+):
+    role_id = request.state.session.role_id
+
+    is_admin = await check_admin(role_id)
+    if not is_admin:
+        return JSONResponse(
+            status_code=status.HTTP_403_FORBIDDEN,
+            content=response_formatter.buildErrorResponse(
+                'Data access not set for non-admin user'
+            ),
+        )
+
+    datasource_type, datasource_config = await get_datasource_config(datasource_id)
+    if not datasource_config:
+        return JSONResponse(
+            status_code=status.HTTP_404_NOT_FOUND,
+            content=response_formatter.buildErrorResponse(
+                f'Datasource not found: {datasource_id}'
+            ),
+        )
+    datasource_plugin = DatasourcePlugin(datasource_type, datasource_config)
+    table_list: TableListResult = datasource_plugin.get_table_names()
+    return JSONResponse(
+        content=response_formatter.buildSuccessResponse(
+            {'resources': table_list.result}
+        ),
+        status_code=status.HTTP_200_OK,
+    )
+
+
+@datasource_router.get('/v1/datasources/{datasource_id}/resources/{resource_id}')
+@inject
+async def 
query_datasource( + request: Request, + datasource_id: str, + resource_id: str, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + user_service: UserService = Depends(Provide[UserContainer.user_service]), + query_filter: str | None = Query(None, alias='$filter'), + projection: str | None = Query('*', alias='$select'), + expand: str | None = Query(None, alias='$expand'), + join: str | None = Query(None, alias='$join'), + order_by: str | None = Query(None, alias='$orderby'), + group_by: str | None = Query(None, alias='$groupby'), + offset: int | None = 0, + limit: int | None = 10, +): + user_id = request.state.session.user_id + role_id = request.state.session.role_id + + resource_is_valid = check_is_valid_resource(resource_id) + if not resource_is_valid: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Invalid resource name: {resource_id}' + ), + ) + + if resource_id == 'parsed_data_object': + resource_id = 'rf_parsed_data_object' + + rls_filters = [] + filter = query_filter + is_admin = await check_admin(role_id) + if not is_admin: + rls_filters = await user_service.get_user_resources( + user_id=user_id, scope=ResourceScope.DATA + ) + + if len(rls_filters) == 0: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Data access not set for non-admin user' + ), + ) + + rls_filters = fetch_data_filters(rls_filters) + if query_filter: + filter = f"{query_filter} $and ({' $and '.join(rls_filters)})" + else: + filter = f"{ ' $and '.join(rls_filters)}" + + datasource_type, datasource_config = await get_datasource_config(datasource_id) + if not datasource_config: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Datasource not found: {datasource_id}' + ), + ) + datasource_plugin = DatasourcePlugin(datasource_type, datasource_config) + + join_query = None + if join and expand: + join_query = f'$expand={expand}&$join={join}' + + result: QueryResult = datasource_plugin.fetch_data( + table_name=resource_id, + projection=projection, + filter=filter, + join=join_query, + offset=offset, + limit=limit, + order_by=order_by, + group_by=group_by, + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'records': serialize_values(result.result)} + ), + ) + + +@datasource_router.post('/v1/datasources/{datasource_id}/resources/{resource_id}') +@inject +async def insert_rows_json( + request: Request, + datasource_id: str, + resource_id: str, + insert_rows_json_payload: InsertRowsJsonPayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + datasource_type, datasource_config = await get_datasource_config(datasource_id) + if not datasource_config: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Datasource not found: {datasource_id}' + ), + ) + + datasource_plugin = DatasourcePlugin(datasource_type, datasource_config) + rows_with_created_at = [ + {**row, 'created_at': datetime.now().isoformat()} + for row in insert_rows_json_payload.data + ] + datasource_plugin.insert_rows_json(resource_id, rows_with_created_at) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': f'Inserted {len(insert_rows_json_payload.data)} 
rows successfully'
+            }
+        ),
+    )
+
+
+@datasource_router.put('/v1/{datasource_id}/dynamic-queries')
+@inject
+async def create_dynamic_query(
+    request: Request,
+    datasource_id: str,
+    dynamic_query: DynamicQueryRequest,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    dynamic_query_yaml_service: DynamicQueryService = Depends(
+        Provide[PluginsContainer.dynamic_query_service]
+    ),
+):
+    role_id, _, _ = get_current_user(request)
+    is_admin = await check_admin(role_id)
+    if not is_admin:
+        raise HTTPException(status_code=401, detail='Unauthorized')
+
+    # Validate the YAML string
+    yaml_content = yaml.safe_load(dynamic_query.dynamic_query)
+
+    if not validate_yaml_query(yaml_content):
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST, detail='Invalid YAML query'
+        )
+
+    await dynamic_query_yaml_service.store_yaml_to_bucket(yaml_content, datasource_id)
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {'message': 'Dynamic query uploaded successfully'}
+        ),
+    )
+
+
+@datasource_router.get('/v1/{datasource_id}/dynamic-queries')
+@inject
+async def get_all_dynamic_query_yaml(
+    request: Request,
+    datasource_id: str,
+    page_number: int = Query(1),
+    page_size: int = Query(50),
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    dynamic_query_yaml_service: DynamicQueryService = Depends(
+        Provide[PluginsContainer.dynamic_query_service]
+    ),
+):
+    role_id, _, _ = get_current_user(request)
+    is_admin = await check_admin(role_id)
+    if not is_admin:
+        raise HTTPException(status_code=401, detail='Unauthorized')
+
+    result = await dynamic_query_yaml_service.retrieve_dynamic_query_yaml(
+        page_number, page_size
+    )
+
+    if not result['yamls']:
+        return JSONResponse(
+            status_code=status.HTTP_200_OK,
+            content=response_formatter.buildSuccessResponse({'yamls': []}),
+        )
+
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(result),
+    )
+
+
+@datasource_router.get('/v1/{datasource_id}/dynamic-queries/{query_id}')
+@inject
+async def get_dynamic_query(
+    request: Request,
+    query_id: str,
+    response_formatter: ResponseFormatter = Depends(
+        Provide[CommonContainer.response_formatter]
+    ),
+    dynamic_query_service: DynamicQueryService = Depends(
+        Provide[PluginsContainer.dynamic_query_service]
+    ),
+):
+    role_id, _, _ = get_current_user(request)
+    is_admin = await check_admin(role_id)
+    if not is_admin:
+        raise HTTPException(status_code=401, detail='Unauthorized')
+
+    yaml_query, yaml_name = await dynamic_query_service.get_dynamic_yaml_query(query_id)
+
+    if not yaml_query:
+        return JSONResponse(
+            status_code=status.HTTP_404_NOT_FOUND,
+            content=response_formatter.buildErrorResponse(
+                f'Dynamic query not found: {query_id}'
+            ),
+        )
+    # Return every query defined in the YAML, along with the YAML's name
+    return JSONResponse(
+        status_code=status.HTTP_200_OK,
+        content=response_formatter.buildSuccessResponse(
+            {
+                'yaml_name': yaml_name,
+                'yaml_query': yaml_query,
+            }
+        ),
+    )
+
+
+@datasource_router.post('/v1/{datasource_id}/dynamic-queries/{query_id}/execute')
+@inject
+async def execute_dynamic_query(
+    request: Request,
+    datasource_id: str,
+    query_id: str,
+    filter: str | None = Query(None, alias='$filter'),
+    offset: int | None = 0,
+    limit: int | None = 100,
+    dynamic_query_params: DynamicQueryExecuteRequest = None,
+    response_formatter: ResponseFormatter = Depends(
+        
Provide[CommonContainer.response_formatter] + ), + dynamic_query_yaml_service: DynamicQueryService = Depends( + Provide[PluginsContainer.dynamic_query_service] + ), + user_service: UserService = Depends(Provide[UserContainer.user_service]), + cache_manager: CacheManager = Depends(Provide[PluginsContainer.cache_manager]), + force_fetch: int = Query(0), +): + role_id, user_id, _ = get_current_user(request) + datasource_type, datasource_config = await get_datasource_config(datasource_id) + if not datasource_config: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Datasource not found: {datasource_id}' + ), + ) + # fetching the yaml query based on the query_id + yaml_query, _ = await dynamic_query_yaml_service.get_dynamic_yaml_query(query_id) + + rls_filter_str = None + is_admin = await check_admin(role_id) + if not is_admin: + rls_filters = await user_service.get_user_resources( + user_id=user_id, scope=ResourceScope.DATA + ) + if len(rls_filters) == 0: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Data access not set for non-admin user' + ), + ) + rls_filters = fetch_data_filters(rls_filters) + rls_filter_str = f"{ ' $and '.join(rls_filters)}" + datasource_plugin = DatasourcePlugin(datasource_type, datasource_config) + # checking if the given query is already in cache + cache_key = generate_cache_key( + query_id, + filter, + rls_filter_str, + limit, + offset, + dynamic_query_params.params if dynamic_query_params else None, + ) + if not force_fetch: + cached_result = cache_manager.get_str(cache_key) + if cached_result: + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + json.loads(cached_result) + ), + ) + res = await datasource_plugin.execute_dynamic_query( + yaml_query, + rls_filter_str, + filter, + offset, + limit, + dynamic_query_params.params if dynamic_query_params else None, + ) + # Serialize date/datetime objects before JSON serialization + serialized_res = serialize_values(res) + # caching the result + cache_manager.add(cache_key, json.dumps(serialized_res), expiry=60 * 2) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse(serialized_res), + ) + + +@datasource_router.delete('/v1/{datasource_id}/dynamic-queries/{query_id}') +@inject +async def delete_dynamic_query( + request: Request, + datasource_id: str, + query_id: str, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + dynamic_query_yaml_service: DynamicQueryService = Depends( + Provide[PluginsContainer.dynamic_query_service] + ), +): + role_id, _, _ = get_current_user(request) + is_admin = await check_admin(role_id) + if not is_admin: + raise HTTPException(status_code=401, detail='Unauthorized') + await dynamic_query_yaml_service.delete_dynamic_query(datasource_id, query_id) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'message': 'Dynamic query deleted successfully'} + ), + ) diff --git a/wavefront/server/modules/plugins_module/plugins_module/controllers/message_processor_controller.py b/wavefront/server/modules/plugins_module/plugins_module/controllers/message_processor_controller.py new file mode 100644 index 00000000..ba8a3a44 --- /dev/null +++ b/wavefront/server/modules/plugins_module/plugins_module/controllers/message_processor_controller.py @@ -0,0 +1,326 @@ 
+""" +Controller for message processor endpoints. +Handles creation, execution, and management of functions stored in cloud storage. +""" + +from dependency_injector.wiring import inject, Provide +from fastapi import Depends, status, APIRouter +from pydantic import BaseModel +from typing import Dict, Any, Optional +from fastapi.responses import JSONResponse +import yaml + +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from plugins_module.plugins_container import PluginsContainer +from plugins_module.services.message_processor_service import MessageProcessorService + + +message_processor_router = APIRouter() + + +class CreateMessageProcessorPayload(BaseModel): + """Payload for creating a message processor from YAML.""" + + name: str + yaml_content: str # YAML as string + description: Optional[str] = None + + +class UpdateMessageProcessorPayload(BaseModel): + """Payload for updating a message processor.""" + + name: Optional[str] = None + description: Optional[str] = None + yaml_content: Optional[str] = None + + +class ExecuteMessageProcessorPayload(BaseModel): + """Payload for executing a processor function.""" + + input_data: Dict[str, Any] + execution_context: Optional[Dict[str, Any]] = None + + +@message_processor_router.post('/v1/message-processors') +@inject +async def create_message_processor( + payload: CreateMessageProcessorPayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + processor_service: MessageProcessorService = Depends( + Provide[PluginsContainer.message_processor_service] + ), +): + """ + Create a new message processor from YAML configuration. + + The YAML will be stored directly in the cloud storage bucket, + and the file URL will be saved in the database. 
+ """ + existing_processor = await processor_service.get_message_processor( + name=payload.name + ) + if existing_processor: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Message processor with name {payload.name} already exists' + ), + ) + + processor = await processor_service.create_message_processor( + name=payload.name, + yaml_content=payload.yaml_content, + description=payload.description, + ) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Message processor created successfully', + 'processor_id': str(processor.id), + } + ), + ) + + +@message_processor_router.get('/v1/message-processors') +@inject +async def list_message_processors( + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + processor_service: MessageProcessorService = Depends( + Provide[PluginsContainer.message_processor_service] + ), +): + """List message processors.""" + processors = await processor_service.list_message_processors() + + processors_list = [processor.to_dict() for processor in processors] + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'processors': processors_list, + } + ), + ) + + +@message_processor_router.get('/v1/message-processors/{processor_id}') +@inject +async def get_message_processor( + processor_id: str, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + processor_service: MessageProcessorService = Depends( + Provide[PluginsContainer.message_processor_service] + ), +): + """Get a message processor by ID.""" + processor = await processor_service.get_message_processor(id=processor_id) + + if not processor: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Message processor {processor_id} not found' + ), + ) + + processor_dict = processor.to_dict() + + yaml_content = await processor_service.get_message_processor_yaml_content(processor) + processor_dict['yaml_content'] = yaml_content + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'processor': processor_dict, + } + ), + ) + + +@message_processor_router.put('/v1/message-processors/{processor_id}') +@inject +async def update_message_processor( + processor_id: str, + payload: UpdateMessageProcessorPayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + processor_service: MessageProcessorService = Depends( + Provide[PluginsContainer.message_processor_service] + ), +): + """Update a message processor.""" + processor = await processor_service.get_message_processor(id=processor_id) + + if not processor: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Message processor {processor_id} not found' + ), + ) + + updates: Dict[str, Any] = {} + if payload.description is not None: + updates['description'] = payload.description + + if payload.name is not None: + existing_processor = await processor_service.get_message_processor( + name=payload.name + ) + if existing_processor and str(existing_processor.id) != processor_id: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Message processor with name {payload.name} 
already exists' + ), + ) + updates['name'] = payload.name + + updated_processor = await processor_service.update_message_processor( + processor=processor, + updates=updates, + yaml_content=payload.yaml_content, + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'processor': updated_processor.to_dict(), + } + ), + ) + + +@message_processor_router.delete('/v1/message-processors/{processor_id}') +@inject +async def delete_message_processor( + processor_id: str, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + processor_service: MessageProcessorService = Depends( + Provide[PluginsContainer.message_processor_service] + ), +): + """Delete a message processor.""" + deleted = await processor_service.delete_message_processor( + processor_id=processor_id + ) + + if not deleted: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Message processor {processor_id} not found' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': f'Message processor {processor_id} deleted successfully', + } + ), + ) + + +@message_processor_router.post('/v1/message-processors/{processor_id}/execute') +@inject +async def execute_message_processor( + processor_id: str, + payload: ExecuteMessageProcessorPayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + processor_service: MessageProcessorService = Depends( + Provide[PluginsContainer.message_processor_service] + ), +): + """ + Execute a message processor function in an isolated VM. + + The function will be loaded from cloud storage and executed + with the provided input data in the specified runtime environment. 
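+
+    Illustrative request body, assuming a processor whose input_schema marks
+    `message` and `source` as required (as in the sample YAML in
+    message_processor_service):
+
+        {
+            "input_data": {"message": "hello", "source": "webhook"},
+            "execution_context": null
+        }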
+ """ + processor = await processor_service.get_message_processor(id=processor_id) + if not processor: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Message processor {processor_id} not found' + ), + ) + yaml_content = await processor_service.get_message_processor_yaml_content(processor) + yaml_dict = yaml.safe_load(yaml_content) + # Validate required YAML structure + required_keys = ['function', 'input_schema', 'type'] + missing_keys = [key for key in required_keys if key not in yaml_dict] + if missing_keys: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Invalid processor YAML: missing keys {missing_keys}' + ), + ) + + function = yaml_dict['function'] + inputs = yaml_dict['input_schema'] + execution_environment = yaml_dict['type'] + execution_code = function['code'] + + if 'required' not in inputs: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + "Invalid processor YAML: input_schema missing 'required' field" + ), + ) + + required_inputs = inputs['required'] + execution_inputs = {} + for input in required_inputs: + if input not in payload.input_data.keys(): + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Input `{input}` is required but not provided' + ), + ) + execution_inputs[input] = payload.input_data[input] + + try: + result = await processor_service.execute_message_processor( + code=execution_code, + type=execution_environment, + input=execution_inputs, + ) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'result': result, + } + ), + ) + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Error executing message processor: {str(e)}' + ), + ) diff --git a/wavefront/server/modules/plugins_module/plugins_module/plugins_container.py b/wavefront/server/modules/plugins_module/plugins_module/plugins_container.py new file mode 100644 index 00000000..e7e56f66 --- /dev/null +++ b/wavefront/server/modules/plugins_module/plugins_module/plugins_container.py @@ -0,0 +1,63 @@ +from dependency_injector import containers +from dependency_injector import providers +from db_repo_module.models.datasource import Datasource +from db_repo_module.models.authenticator import Authenticator +from db_repo_module.models.message_processors import MessageProcessors +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from plugins_module.services.dynamic_query_service import DynamicQueryService +from plugins_module.services.message_processor_service import MessageProcessorService +from flo_cloud.cloud_storage import CloudStorageManager + + +class PluginsContainer(containers.DeclarativeContainer): + config = providers.Configuration(ini_files=['config.ini']) + + db_client = providers.Dependency() + + cache_manager = providers.Dependency() + + dynamic_query_repository = providers.Dependency() + + datasource_repository = providers.Singleton( + SQLAlchemyRepository[Datasource], + model=Datasource, + db_client=db_client, + ) + + authenticator_repository = providers.Singleton( + SQLAlchemyRepository[Authenticator], + model=Authenticator, + db_client=db_client, + ) + + message_processor_repository = providers.Singleton( + 
SQLAlchemyRepository[MessageProcessors], + model=MessageProcessors, + db_client=db_client, + ) + + # dynamic query service + cloud_provider = config.cloud_config.cloud_provider + + cloud_manager = providers.Singleton( + CloudStorageManager, provider=config.cloud_config.cloud_provider + ) + + dynamic_query_service = providers.Singleton( + DynamicQueryService, + cloud_manager=cloud_manager, + dynamic_query_repo=dynamic_query_repository, + bucket_name=config.aws.aws_asset_storage_bucket + if cloud_provider == 'aws' + else config.gcp.gcp_asset_storage_bucket, + ) + + message_processor_service = providers.Singleton( + MessageProcessorService, + cloud_manager=cloud_manager, + message_processor_repository=message_processor_repository, + bucket_name=config.aws.aws_asset_storage_bucket + if cloud_provider == 'aws' + else config.gcp.gcp_asset_storage_bucket, + hermes_url=config.hermes.url, + ) diff --git a/wavefront/server/modules/plugins_module/plugins_module/services/__init__.py b/wavefront/server/modules/plugins_module/plugins_module/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/modules/plugins_module/plugins_module/services/authenticator_services.py b/wavefront/server/modules/plugins_module/plugins_module/services/authenticator_services.py new file mode 100644 index 00000000..c8057a94 --- /dev/null +++ b/wavefront/server/modules/plugins_module/plugins_module/services/authenticator_services.py @@ -0,0 +1,366 @@ +from typing import Dict, Any, Optional, List +from uuid import UUID + +from db_repo_module.models.authenticator import Authenticator +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from authenticator import AuthenticatorType +from authenticator.factory import get_authenticator_factory +from authenticator.types import AuthenticatorABC + + +async def get_authenticator_config( + auth_id: UUID, authenticator_repository: SQLAlchemyRepository[Authenticator] +) -> Optional[Dict[str, Any]]: + """Get authenticator configuration by ID.""" + authenticator = await authenticator_repository.find_one( + auth_id=auth_id, is_deleted=False + ) + + if not authenticator: + return None + + return { + 'auth_id': str(authenticator.auth_id), + 'auth_name': authenticator.auth_name, + 'auth_type': authenticator.auth_type, + 'auth_desc': authenticator.auth_desc, + 'config': authenticator.config, + 'is_enabled': authenticator.is_enabled, + 'created_at': authenticator.created_at.isoformat() + if authenticator.created_at + else None, + 'updated_at': authenticator.updated_at.isoformat() + if authenticator.updated_at + else None, + } + + +async def validate_authenticator_type( + auth_type: str, authenticator_repository: SQLAlchemyRepository[Authenticator] +) -> bool: + """Validate if authenticator type is enabled.""" + enabled_auth = await authenticator_repository.find_one( + auth_type=auth_type, is_enabled=True, is_deleted=False + ) + return enabled_auth is not None + + +async def create_authenticator_config( + auth_name: str, + auth_type: str, + auth_desc: Optional[str], + config: Dict[str, Any], + authenticator_repository: SQLAlchemyRepository[Authenticator], +) -> Dict[str, Any]: + """Create new authenticator configuration.""" + + # Validate auth_name has no spaces + if ' ' in auth_name: + raise ValueError('Authenticator name cannot contain spaces') + + # Validate configuration based on type + auth_type_enum = AuthenticatorType(auth_type) + factory = get_authenticator_factory() + # Validate config without creating instance + 
factory.validate_config(auth_type_enum, config) + + # Check if authenticator name already exists + existing = await authenticator_repository.find_one(auth_name=auth_name) + if existing and not existing.is_deleted: + raise ValueError(f"Authenticator with name '{auth_name}' already exists") + + # Create or update authenticator + if existing: + # Reactivate deleted authenticator + authenticator = await authenticator_repository.find_one_and_update( + {'auth_name': auth_name}, + refresh=True, + auth_type=auth_type, + auth_desc=auth_desc, + config=config, + is_enabled=True, + is_deleted=False, + ) + # Add to factory with auth_id + factory.get_authenticator(str(authenticator.auth_id), auth_type_enum, config) + else: + # Create new authenticator + authenticator = await authenticator_repository.create( + auth_name=auth_name, + auth_type=auth_type, + auth_desc=auth_desc, + config=config, + is_enabled=True, + ) + # Add to factory with auth_id + factory.get_authenticator(str(authenticator.auth_id), auth_type_enum, config) + + return { + 'auth_id': str(authenticator.auth_id), + 'auth_name': authenticator.auth_name, + 'auth_type': authenticator.auth_type, + 'auth_desc': authenticator.auth_desc, + 'config': authenticator.config, + 'is_enabled': authenticator.is_enabled, + 'created_at': authenticator.created_at.isoformat() + if authenticator.created_at + else None, + 'updated_at': authenticator.updated_at.isoformat() + if authenticator.updated_at + else None, + } + + +async def update_authenticator_config( + auth_id: UUID, + config: Dict[str, Any], + auth_desc: Optional[str] = None, + authenticator_repository: SQLAlchemyRepository[Authenticator] = None, +) -> Optional[Dict[str, Any]]: + """Update existing authenticator configuration.""" + + authenticator = await authenticator_repository.find_one( + auth_id=auth_id, is_deleted=False + ) + + if not authenticator: + return None + + factory = get_authenticator_factory() + auth_type_enum = AuthenticatorType(authenticator.auth_type) + + # Validate config without creating instance + factory.validate_config(auth_type_enum, config) + + # Update authenticator + update_data = {'config': config} + if auth_desc is not None: + update_data['auth_desc'] = auth_desc + + updated_authenticator = await authenticator_repository.find_one_and_update( + {'auth_id': auth_id}, refresh=True, **update_data + ) + + # Update factory with auth_id + factory.update_authenticator(str(auth_id), auth_type_enum, config) + + return { + 'auth_id': str(updated_authenticator.auth_id), + 'auth_name': updated_authenticator.auth_name, + 'auth_type': updated_authenticator.auth_type, + 'auth_desc': updated_authenticator.auth_desc, + 'config': updated_authenticator.config, + 'is_enabled': updated_authenticator.is_enabled, + 'created_at': updated_authenticator.created_at.isoformat() + if updated_authenticator.created_at + else None, + 'updated_at': updated_authenticator.updated_at.isoformat() + if updated_authenticator.updated_at + else None, + } + + +async def delete_authenticator_config( + auth_id: UUID, authenticator_repository: SQLAlchemyRepository[Authenticator] +) -> None: + """Soft delete authenticator configuration.""" + + authenticator = await authenticator_repository.find_one( + auth_id=auth_id, is_deleted=False + ) + + if not authenticator: + raise Exception('Authenticator not found. 
Might be deleted') + + # Remove from factory cache before deletion + factory = get_authenticator_factory() + auth_type_enum = AuthenticatorType(authenticator.auth_type) + factory.remove_authenticator(str(auth_id), auth_type_enum) + + await authenticator_repository.find_one_and_update( + {'auth_id': auth_id}, refresh=False, is_deleted=True + ) + + +async def test_authenticator_health( + auth_id: UUID, authenticator_repository: SQLAlchemyRepository[Authenticator] +) -> Dict[str, Any]: + """Test authenticator health and connectivity by auth_id.""" + + # Get authenticator instance and config to distinguish not found vs disabled + authenticator, config_data = await get_authenticator_with_config( + auth_id, authenticator_repository + ) + + # Authenticator not found + if config_data is None: + return { + 'healthy': False, + 'message': f"Authenticator ID '{auth_id}' not found", + 'details': {}, + } + + # Authenticator exists but is disabled + if authenticator is None: + return { + 'healthy': False, + 'message': f"Authenticator ID '{auth_id}' is disabled", + 'details': {'is_enabled': False}, + } + + # Authenticator is enabled, check health + health_result = authenticator.get_health_status() + + return { + 'healthy': health_result.healthy, + 'message': health_result.message, + 'last_check': health_result.last_check.isoformat() + if health_result.last_check + else None, + 'details': health_result.details or {}, + } + + +async def get_authenticator_instance( + auth_id: UUID, authenticator_repository: SQLAlchemyRepository[Authenticator] +) -> Optional[AuthenticatorABC]: + """ + Get authenticator instance by ID. + + Returns None if authenticator doesn't exist OR if it's disabled. + Callers must check is_enabled separately if they need to distinguish + between missing and disabled authenticators. + """ + + config_data = await get_authenticator_config(auth_id, authenticator_repository) + if not config_data: + return None + + # Return None if disabled (don't raise exception to keep function side-effect-free) + if not config_data['is_enabled']: + return None + + auth_type_enum = AuthenticatorType(config_data['auth_type']) + factory = get_authenticator_factory() + return factory.get_authenticator( + str(auth_id), auth_type_enum, config_data['config'] + ) + + +async def get_authenticator_with_config( + auth_id: UUID, authenticator_repository: SQLAlchemyRepository[Authenticator] +) -> tuple[Optional[AuthenticatorABC], Optional[Dict[str, Any]]]: + """ + Get authenticator instance and its configuration by ID. 
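+
+    A typical call site distinguishes the three outcomes (sketch, mirroring
+    test_authenticator_health above):
+
+        authenticator, config_data = await get_authenticator_with_config(
+            auth_id, authenticator_repository
+        )
+        if config_data is None:
+            ...  # not found
+        elif authenticator is None:
+            ...  # exists but is disabled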
+ + Returns: + tuple: (authenticator_instance, config_data) + - (None, None): Authenticator not found + - (None, config_data): Authenticator exists but is disabled + - (authenticator_instance, config_data): Authenticator is enabled and ready + """ + + config_data = await get_authenticator_config(auth_id, authenticator_repository) + if not config_data: + return None, None + + # If disabled, return config but no instance + if not config_data['is_enabled']: + return None, config_data + + auth_type_enum = AuthenticatorType(config_data['auth_type']) + factory = get_authenticator_factory() + authenticator = factory.get_authenticator( + str(auth_id), auth_type_enum, config_data['config'] + ) + + return authenticator, config_data + + +async def get_authenticator_instance_by_name( + auth_name: str, authenticator_repository: SQLAlchemyRepository[Authenticator] +) -> Optional[AuthenticatorABC]: + """Get authenticator instance by name.""" + + authenticator = await authenticator_repository.find_one( + auth_name=auth_name, is_deleted=False + ) + + if not authenticator: + return None + + auth_type_enum = AuthenticatorType(authenticator.auth_type) + factory = get_authenticator_factory() + return factory.get_authenticator( + str(authenticator.auth_id), auth_type_enum, authenticator.config + ) + + +async def get_all_authenticators( + authenticator_repository: SQLAlchemyRepository[Authenticator], +) -> List[Dict[str, Any]]: + """Get list of all authenticators.""" + authenticators = await authenticator_repository.find(is_deleted=False) + + return [ + { + 'auth_id': str(auth.auth_id), + 'auth_name': auth.auth_name, + 'auth_type': auth.auth_type, + 'auth_desc': auth.auth_desc, + 'is_enabled': auth.is_enabled, + 'created_at': auth.created_at.isoformat() if auth.created_at else None, + 'updated_at': auth.updated_at.isoformat() if auth.updated_at else None, + } + for auth in authenticators + ] + + +async def enable_authenticator( + auth_id: UUID, authenticator_repository: SQLAlchemyRepository[Authenticator] +) -> None: + """Enable an authenticator.""" + authenticator = await authenticator_repository.find_one( + auth_id=auth_id, is_deleted=False + ) + if not authenticator: + raise Exception('Authenticator not found. Might be deleted') + + if authenticator.is_enabled: + raise Exception('Authenticator is already enabled') + + await authenticator_repository.find_one_and_update( + {'auth_id': auth_id}, refresh=False, is_enabled=True + ) + + +async def disable_authenticator( + auth_id: UUID, authenticator_repository: SQLAlchemyRepository[Authenticator] +) -> None: + """Disable an authenticator.""" + authenticator = await authenticator_repository.find_one( + auth_id=auth_id, is_deleted=False + ) + if not authenticator: + raise Exception('Authenticator not found. Might be deleted') + + if not authenticator.is_enabled: + raise Exception('Authenticator is already disabled') + + # Check if there's at least one other enabled authenticator + other_enabled_authenticators = await authenticator_repository.find( + is_enabled=True, is_deleted=False + ) + # Filter out the current authenticator from the list + other_enabled_authenticators = [ + auth for auth in other_enabled_authenticators if auth.auth_id != auth_id + ] + + if not other_enabled_authenticators: + raise Exception( + 'Cannot disable authenticator. 
At least one authenticator must remain enabled'
+        )
+
+    await authenticator_repository.find_one_and_update(
+        {'auth_id': auth_id}, refresh=False, is_enabled=False
+    )
diff --git a/wavefront/server/modules/plugins_module/plugins_module/services/datasource_services.py b/wavefront/server/modules/plugins_module/plugins_module/services/datasource_services.py
new file mode 100644
index 00000000..744d9913
--- /dev/null
+++ b/wavefront/server/modules/plugins_module/plugins_module/services/datasource_services.py
@@ -0,0 +1,84 @@
+import collections
+from datasource import DataSourceType, BigQueryConfig, RedshiftConfig
+from db_repo_module.models.datasource import Datasource
+from db_repo_module.models.role import Role
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from dependency_injector.wiring import inject
+from dependency_injector.wiring import Provide
+from fastapi import Depends
+from auth_module.auth_container import AuthContainer
+from plugins_module.plugins_container import PluginsContainer
+from plugins_module.utils.helper import AddDatasourcePayload
+from user_management_module.constants.auth import SERVICE_AUTH_ROLE_ID
+
+
+@inject
+async def get_datasource_config(
+    datasource_id: str,
+    datasource_repository: SQLAlchemyRepository[Datasource] = Depends(
+        Provide(PluginsContainer.datasource_repository)
+    ),
+) -> tuple[DataSourceType, BigQueryConfig | RedshiftConfig]:
+    datasource: Datasource | None = await datasource_repository.find_one(
+        id=datasource_id
+    )
+    if not datasource:
+        return None, None
+
+    if datasource.type == DataSourceType.GCP_BIGQUERY:
+        return DataSourceType.GCP_BIGQUERY, BigQueryConfig(**datasource.config)
+    elif datasource.type == DataSourceType.AWS_REDSHIFT:
+        return DataSourceType.AWS_REDSHIFT, RedshiftConfig(**datasource.config)
+    else:
+        raise ValueError(f'Invalid datasource type: {datasource.type}')
+
+
+@inject
+async def check_admin(
+    role_id: str,
+    role_repository: SQLAlchemyRepository[Role] = Depends(
+        Provide(AuthContainer.role_repository)
+    ),
+) -> bool:
+    if role_id == SERVICE_AUTH_ROLE_ID:
+        return True
+    role = await role_repository.find_one(id=role_id)
+    if not role:
+        return False
+    return role.name == 'admin'
+
+
+def check_is_valid_resource(resource_id: str) -> bool:
+    return resource_id in [
+        'parsed_data_object',
+        'rf_parsed_data_object',
+        'rf_gold_data_object',
+        'rf_gold_item_details',
+    ]
+
+
+def fetch_data_filters(data_filters: list) -> list[str]:
+    group_filter = collections.defaultdict(list)
+    for data_filter in data_filters:
+        group_filter[data_filter.key].append(data_filter.value)
+
+    additional_filters = []
+    for key, values in group_filter.items():
+        if len(values) == 1:
+            additional_filters.append(f"({key} eq '{values[0]}')")
+        else:
+            or_condition = []
+            for value in values:
+                or_condition.append(f"({key} eq '{value}')")
+            additional_filters.append(f"{'$or'.join(or_condition)}")
+
+    return additional_filters
+
+
+def validate_datasource_payload(payload: AddDatasourcePayload) -> bool:
+    return payload.type in [
+        datasource_type.value for datasource_type in DataSourceType
+    ]
diff --git a/wavefront/server/modules/plugins_module/plugins_module/services/dynamic_query_service.py b/wavefront/server/modules/plugins_module/plugins_module/services/dynamic_query_service.py
new file mode 100644
index 00000000..2dda5a78
--- /dev/null
+++ b/wavefront/server/modules/plugins_module/plugins_module/services/dynamic_query_service.py
@@ -0,0 +1,150 @@
+from 
db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+import yaml
+from common_module.log.logger import logger
+from flo_cloud.cloud_storage import CloudStorageManager
+from db_repo_module.models.dynamic_query_yaml import DynamicQueryYaml
+
+
+class DynamicQueryService:
+    def __init__(
+        self,
+        cloud_manager: CloudStorageManager,
+        dynamic_query_repo: SQLAlchemyRepository[DynamicQueryYaml],
+        bucket_name: str | None = None,
+    ):
+        self.cloud_manager = cloud_manager
+        self.dynamic_query_repo = dynamic_query_repo
+        self.bucket_name = bucket_name
+        self.prefix = 'dynamic_query/v1'
+
+    async def store_yaml_to_bucket(self, yaml_content: dict, datasource_id: str):
+        """Store YAML file to cloud storage and save metadata to database
+
+        Args:
+            yaml_content: The YAML content as a dictionary
+            datasource_id: The ID of the datasource
+
+        Raises:
+            ValueError: If YAML file is invalid or missing required 'id' field
+        """
+        try:
+            yaml_id = yaml_content.get('id', '')
+            if not yaml_id:
+                raise ValueError("YAML file must contain an 'id' field")
+
+            # Generate the file key
+            file_key = f'{self.prefix}/{yaml_id}.yaml'
+
+            # Convert the dictionary to YAML string and then to bytes
+            yaml_string = yaml.dump(yaml_content, default_flow_style=False)
+            file_content = yaml_string.encode('utf-8')
+
+            # Store the file in the cloud storage bucket
+            self.cloud_manager.save_small_file(
+                file_content=file_content, bucket_name=self.bucket_name, key=file_key
+            )
+
+            # Store the metadata in the database
+            await self.dynamic_query_repo.upsert(
+                filters={'name': yaml_id},
+                datasource_id=datasource_id,
+                file_path=file_key,
+            )
+
+        except ValueError as e:
+            logger.error(f'Error uploading dynamic query YAML {yaml_id}: {str(e)}')
+            raise
+        except Exception as e:
+            logger.error(
+                f'Unexpected error uploading dynamic query YAML {yaml_id}: {str(e)}'
+            )
+            raise ValueError(f'Failed to upload YAML file: {str(e)}')
+
+    async def retrieve_dynamic_query_yaml(self, page_number, page_size):
+        """Retrieve dynamic query YAML files from cloud storage with pagination
+
+        Args:
+            page_number: The page number for pagination
+            page_size: Number of items per page
+
+        Returns:
+            dict: Contains yamls list, pagination info, and total count
+        """
+        files_keys, has_more = self.cloud_manager.list_files(
+            self.bucket_name, self.prefix, page_size, page_number
+        )
+        yamls = []
+
+        for file_key in files_keys:
+            splitter = file_key.split('/')
+            if len(splitter) >= 3:
+                yamls.append(
+                    {'version': splitter[1], 'file': splitter[2], 'full_path': file_key}
+                )
+
+        return {
+            'yamls': yamls,
+            'has_more': has_more,
+            'page_number': page_number,
+            'page_size': page_size,
+            'total_count': len(yamls),
+        }
+
+    async def get_dynamic_yaml_query(self, query_id: str):
+        """Get a dynamic YAML query from cloud storage
+
+        Args:
+            query_id: The ID of the query
+
+        Returns:
+            tuple: (list of query dicts with their parameters, YAML name or None)
+        """
+        file_key = f'{self.prefix}/{query_id}.yaml'
+        file_content = self.cloud_manager.read_file(self.bucket_name, file_key)
+        yaml_query = yaml.safe_load(file_content.decode('utf-8'))
+        if not yaml_query:
+            raise ValueError('YAML file is invalid')
+
+        queries = []
+        for query in yaml_query['queries']:
+            query_data = {
+                'id': query['id'],
+                'query': query['query'],
+            }
+            if 'description' in query:
+                query_data['description'] = query['description']
+            # Add parameters only if they exist in the query
+            if 'parameters' in query and query['parameters']:
+                # Only the list format (entries with name/type) is supported
+                if isinstance(query['parameters'], list):
+                    query_data['parameters'] = query['parameters']
+                else:
+                    raise ValueError('Invalid parameters format')
+
+            queries.append(query_data)
+
+        return queries, yaml_query.get('name')
+
+    async def delete_dynamic_query(self, datasource_id: str, query_id: str):
+        """Delete dynamic query from cloud storage and database
+
+        Args:
+            datasource_id: The ID of the datasource
+            query_id: The ID of the query
+        """
+        try:
+            # Check whether the given query ID is present in the database
+            query = await self.dynamic_query_repo.find_one(
+                name=query_id, datasource_id=datasource_id
+            )
+            if not query:
+                raise ValueError(f'Query {query_id} not found')
+
+            # Delete the file from the cloud storage
+            self.cloud_manager.delete_file(self.bucket_name, query.file_path)
+            # Delete the record from the database
+            await self.dynamic_query_repo.delete_all(name=query_id)
+
+        except Exception as e:
+            logger.error(f'Error deleting dynamic query {query_id}: {str(e)}')
+            raise
diff --git a/wavefront/server/modules/plugins_module/plugins_module/services/message_processor_service.py b/wavefront/server/modules/plugins_module/plugins_module/services/message_processor_service.py
new file mode 100644
index 00000000..ab1bb6b2
--- /dev/null
+++ b/wavefront/server/modules/plugins_module/plugins_module/services/message_processor_service.py
@@ -0,0 +1,184 @@
+"""
+Service for managing and executing functions stored in cloud storage buckets.
+YAML files are stored directly in buckets, and only the file URL is stored in the database.
+"""
+
+import yaml
+from typing import Dict, Any, Optional, List
+from uuid import uuid4
+import requests
+import asyncio
+
+from db_repo_module.models.message_processors import MessageProcessors
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from flo_cloud.cloud_storage import CloudStorageManager
+from common_module.log.logger import logger
+
+# Function Definition Schema v1.0 for reference. Note that `type` (the
+# execution runtime) is required by save_message_processor_yaml; the value
+# below is illustrative.
+sample_yaml = """
+type: javascript
+
+function:
+  code: |
+    export default function(input) {
+      return {
+        success: true,
+        message: input.message,
+        source: input.source,
+        timestamp: new Date().toISOString(),
+      };
+    }
+
+input_schema:
+  required:
+    - message
+    - source
+  properties:
+    message:
+      type: string
+      description: The message to process
+    source:
+      type: string
+      description: The source of the message
+
+environment:
+  variables:
+    - name: LOG_LEVEL
+      value: "info"
+    - name: API_URL
+      value: "https://api.example.com"
+
+"""
+
+
+class HermesClient:
+    def __init__(self, hermes_url: str):
+        self.hermes_url = hermes_url
+
+    async def execute_code(
+        self, code: str, type: str, input: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        def _do_request():
+            resp = requests.post(
+                f'{self.hermes_url}/execute',
+                json={
+                    'code': code,
+                    'type': type,
+                    'input': input,
+                },
+                timeout=10,
+            )
+            resp.raise_for_status()
+            return resp.json()
+
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(None, _do_request)
+
+
+class MessageProcessorService:
+    """Service for managing function processors stored in cloud storage."""
+
+    def __init__(
+        self,
+        cloud_manager: CloudStorageManager,
+        message_processor_repository: SQLAlchemyRepository[MessageProcessors],
+        bucket_name: str,
+        hermes_url: str,
+    ):
+        self.cloud_manager = cloud_manager
+        self.message_processor_repository = message_processor_repository
+        self.bucket_name = bucket_name
+        self.prefix = 'message_processors/v1'
+        self.hermes_client = HermesClient(hermes_url=hermes_url)
+
+    async def create_message_processor(
+        self, name: 
str, yaml_content: str, description: Optional[str] = None + ) -> MessageProcessors: + message_processor_id = uuid4() + file_name = f'{message_processor_id}.yaml' + file_path = f'{self.prefix}/{file_name}' + await self.save_message_processor_yaml( + yaml_content=yaml_content, file_path=file_path + ) + return await self.message_processor_repository.create( + id=message_processor_id, + name=name, + description=description, + source=file_name, + ) + + async def save_message_processor_yaml( + self, yaml_content: str, file_path: str + ) -> None: + yaml_dict = yaml.safe_load(yaml_content) + # Validate required top-level fields + if not yaml_dict: + raise ValueError('YAML content is empty or invalid') + + required_fields = ['function', 'input_schema', 'type'] + missing_fields = [field for field in required_fields if field not in yaml_dict] + if missing_fields: + raise ValueError(f'YAML must contain required fields: {missing_fields}') + + # Validate input_schema structure + if 'required' not in yaml_dict['input_schema']: + raise ValueError("YAML input_schema must contain 'required' field") + + # Store YAML file in bucket + yaml_bytes = yaml_content.encode('utf-8') + self.cloud_manager.save_small_file( + file_content=yaml_bytes, bucket_name=self.bucket_name, key=file_path + ) + logger.info(f'Stored YAML file at {self.bucket_name}/{file_path}') + + async def get_message_processor(self, **kwargs) -> Optional[MessageProcessors]: + return await self.message_processor_repository.find_one(**kwargs) + + async def get_message_processor_yaml_content( + self, processor: MessageProcessors + ) -> str: + filepath = f'{self.prefix}/{processor.source}' + yaml_bytes = self.cloud_manager.read_file(self.bucket_name, filepath) + return yaml_bytes.decode('utf-8') + + async def list_message_processors(self) -> List[MessageProcessors]: + processors = await self.message_processor_repository.find() + return processors + + async def update_message_processor( + self, + processor: MessageProcessors, + updates: Dict[str, Any], + yaml_content: Optional[str] = None, + ) -> Optional[MessageProcessors]: + if yaml_content is not None: + file_path = f'{self.prefix}/{processor.source}' + await self.save_message_processor_yaml( + yaml_content=yaml_content, file_path=file_path + ) + + for key, value in updates.items(): + if hasattr(processor, key): + setattr(processor, key, value) + + return await self.message_processor_repository.find_one_and_update( + filters={'id': processor.id}, refresh=True, **updates + ) + + async def delete_message_processor(self, processor_id: str) -> bool: + processor = await self.get_message_processor(id=processor_id) + if not processor: + return False + + file_path = f'{self.prefix}/{processor.source}' + self.cloud_manager.delete_file(self.bucket_name, file_path) + logger.info(f'Deleted YAML file at {self.bucket_name}/{file_path}') + + await self.message_processor_repository.delete_all(id=processor_id) + return True + + async def execute_message_processor( + self, code: str, type: str, input: Dict[str, Any] + ) -> Dict[str, Any]: + result = await self.hermes_client.execute_code( + code=code, type=type, input=input + ) + return result diff --git a/wavefront/server/modules/plugins_module/plugins_module/utils/__init__.py b/wavefront/server/modules/plugins_module/plugins_module/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/modules/plugins_module/plugins_module/utils/authenticator_helper.py 
b/wavefront/server/modules/plugins_module/plugins_module/utils/authenticator_helper.py new file mode 100644 index 00000000..93e31927 --- /dev/null +++ b/wavefront/server/modules/plugins_module/plugins_module/utils/authenticator_helper.py @@ -0,0 +1,135 @@ +from typing import Dict, Any, List +from pydantic import BaseModel + + +class AddAuthenticatorPayload(BaseModel): + auth_name: str + auth_type: str + config: Dict[str, Any] + + +def validate_google_oauth_config(config: Dict[str, Any]) -> List[str]: + """Validate Google OAuth configuration and return list of errors.""" + errors = [] + + required_fields = ['client_id', 'client_secret', 'redirect_uri'] + for field in required_fields: + if not config.get(field): + errors.append(f'Missing required field: {field}') + + # Validate redirect_uri format + redirect_uri = config.get('redirect_uri') + if redirect_uri and not ( + redirect_uri.startswith('http://') or redirect_uri.startswith('https://') + ): + errors.append('redirect_uri must be a valid HTTP/HTTPS URL') + + # Validate scopes + scopes = config.get('scopes', []) + if not isinstance(scopes, list) or len(scopes) == 0: + errors.append('scopes must be a non-empty list') + + return errors + + +def validate_microsoft_oauth_config(config: Dict[str, Any]) -> List[str]: + """Validate Microsoft OAuth configuration and return list of errors.""" + errors = [] + + required_fields = ['client_id', 'client_secret', 'tenant_id', 'redirect_uri'] + for field in required_fields: + if not config.get(field): + errors.append(f'Missing required field: {field}') + + # Validate redirect_uri format + redirect_uri = config.get('redirect_uri') + if redirect_uri and not ( + redirect_uri.startswith('http://') or redirect_uri.startswith('https://') + ): + errors.append('redirect_uri must be a valid HTTP/HTTPS URL') + + # Validate scopes + scopes = config.get('scopes', []) + if not isinstance(scopes, list) or len(scopes) == 0: + errors.append('scopes must be a non-empty list') + + # Validate authority + authority = config.get('authority', '') + if authority and not authority.startswith('https://'): + errors.append('authority must be a valid HTTPS URL') + + return errors + + +def validate_email_password_config(config: Dict[str, Any]) -> List[str]: + """Validate email/password configuration and return list of errors.""" + errors = [] + + password_policy = config.get('password_policy', {}) + + # Validate min_length + min_length = password_policy.get('min_length', 8) + if not isinstance(min_length, int) or min_length < 6: + errors.append('password_policy.min_length must be an integer >= 6') + + # Validate max_attempts + max_attempts = password_policy.get('max_attempts', 5) + if not isinstance(max_attempts, int) or max_attempts < 1: + errors.append('password_policy.max_attempts must be an integer >= 1') + + # Validate lockout_duration + lockout_duration = password_policy.get('lockout_duration', 900) + if not isinstance(lockout_duration, int) or lockout_duration < 60: + errors.append( + 'password_policy.lockout_duration must be an integer >= 60 (seconds)' + ) + + # Validate session_timeout + session_timeout = config.get('session_timeout', 3600) + if not isinstance(session_timeout, int) or session_timeout < 300: + errors.append('session_timeout must be an integer >= 300 (seconds)') + + return errors + + +def get_config_template(auth_type: str) -> Dict[str, Any]: + """Get configuration template for authenticator type.""" + + templates = { + 'email_password': { + 'password_policy': { + 'min_length': 8, + 'require_uppercase': 
True, + 'require_lowercase': True, + 'require_numbers': True, + 'require_special_chars': False, + 'max_attempts': 5, + 'lockout_duration': 900, + }, + 'two_factor_enabled': False, + 'password_reset_enabled': True, + 'session_timeout': 3600, + 'rate_limit_enabled': True, + }, + 'google_oauth': { + 'client_id': 'YOUR_GOOGLE_CLIENT_ID', + 'client_secret': 'YOUR_GOOGLE_CLIENT_SECRET', + 'redirect_uri': 'https://your-domain.com/auth/google/callback', + 'scopes': ['openid', 'email', 'profile'], + 'hosted_domain': None, + 'access_type': 'offline', + 'prompt': 'consent', + }, + 'microsoft_oauth': { + 'client_id': 'YOUR_MICROSOFT_CLIENT_ID', + 'client_secret': 'YOUR_MICROSOFT_CLIENT_SECRET', + 'tenant_id': 'YOUR_TENANT_ID', + 'redirect_uri': 'https://your-domain.com/auth/microsoft/callback', + 'scopes': ['openid', 'email', 'profile'], + 'authority': 'https://login.microsoftonline.com/', + 'response_type': 'code', + 'response_mode': 'query', + }, + } + + return templates.get(auth_type, {}) diff --git a/wavefront/server/modules/plugins_module/plugins_module/utils/helper.py b/wavefront/server/modules/plugins_module/plugins_module/utils/helper.py new file mode 100644 index 00000000..6e51d2ea --- /dev/null +++ b/wavefront/server/modules/plugins_module/plugins_module/utils/helper.py @@ -0,0 +1,111 @@ +from typing import List, Dict, Any +from pydantic import BaseModel +import json +import hashlib + + +class AddDatasourcePayload(BaseModel): + name: str + type: str + config: str + description: str | None = None + + +class UpdateDatasourcePayload(BaseModel): + name: str | None = None + type: str | None = None + config: str | None = None + description: str | None = None + + +class InsertRowsJsonPayload(BaseModel): + data: List[Dict[str, Any]] + + +class DynamicQueryRequest(BaseModel): + dynamic_query: str + + +class DynamicQueryExecuteRequest(BaseModel): + params: dict[str, str] + + +def generate_cache_key( + query_id: str, + filter: str = None, + rls_filter_str: str = None, + limit: int = None, + offset: int = None, + params: dict[str, str] = None, +) -> str: + """Generate a unique cache key based on query parameters.""" + key_dict = { + 'query_id': query_id, + 'filter': filter, + 'rls_filter': rls_filter_str, + 'limit': limit, + 'offset': offset, + 'params': params, + } + + key_json = json.dumps(key_dict, sort_keys=True, separators=(',', ':')) + hash_digest = hashlib.md5(key_json.encode()).hexdigest() + return f'dynamic_query:{hash_digest}' + + +def validate_yaml_query(yaml_query: dict) -> bool: + """ + Validate the structure of a dynamic query YAML file. 
+ + Args: + yaml_query: Dictionary containing the parsed YAML query + + Returns: + bool: True if valid, False otherwise + """ + # Check top-level required fields + required_fields = ['id', 'queries', 'name'] + for field in required_fields: + if field not in yaml_query: + return False + + # Validate queries is a list + if not isinstance(yaml_query['queries'], list): + return False + + # Check that we have at least one query + if len(yaml_query['queries']) == 0: + return False + + # Track query IDs to ensure uniqueness + query_ids = set() + + # Validate each query in the queries list + queries_required_fields = ['id', 'description', 'query'] + for query in yaml_query['queries']: + # Check required fields for each query + for field in queries_required_fields: + if field not in query: + return False + + # Check for duplicate query IDs + query_id = query['id'] + if query_id in query_ids: + return False + query_ids.add(query_id) + + # Validate parameters if present (optional field) + if 'parameters' in query: + parameters = query['parameters'] + if not isinstance(parameters, list): + return False + + # Validate each parameter has required fields + for param in parameters: + if not isinstance(param, dict): + return False + # Parameters should have at least a name and type + if 'name' not in param or 'type' not in param: + return False + + return True diff --git a/wavefront/server/modules/plugins_module/pyproject.toml b/wavefront/server/modules/plugins_module/pyproject.toml new file mode 100644 index 00000000..b5f616ce --- /dev/null +++ b/wavefront/server/modules/plugins_module/pyproject.toml @@ -0,0 +1,46 @@ +[project] +name = "plugins-module" +version = "0.0.1" +description = "Authentication module" +authors = [ + { name = "rootflo engineering", email = "engineering@rootflo.ai" } +] +requires-python = ">=3.11" + +dependencies = [ + "authenticator", + "auth-module", + "common-module", + "db-repo-module", + "datasource", + "user-management-module", + + "dependency-injector>=4.42.0,<5.0.0", +] + +[tool.uv.sources] +authenticator = { workspace = true } +auth-module = { workspace = true } +common-module = { workspace = true } +db-repo-module = { workspace = true } +datasource = { workspace = true } +user-management-module = { workspace = true} + +[dependency-groups] +dev = [ + "pytest>=8.3.3,<9.0.0", + "pytest-asyncio>=0.24.0,<1.0.0", +] + +[tool.pytest.ini_options] +asyncio_mode = "auto" + +[tool.uv] +package = true + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["plugins_module"] diff --git a/wavefront/server/modules/product_analysis_module/product_analysis_module/controllers/product_anaysis_controllers.py b/wavefront/server/modules/product_analysis_module/product_analysis_module/controllers/product_anaysis_controllers.py new file mode 100644 index 00000000..d3cba029 --- /dev/null +++ b/wavefront/server/modules/product_analysis_module/product_analysis_module/controllers/product_anaysis_controllers.py @@ -0,0 +1,101 @@ +from fastapi import APIRouter, Depends, Request, status +from fastapi.responses import JSONResponse +from common_module.response_formatter import ResponseFormatter +from common_module.common_container import CommonContainer +from dependency_injector.wiring import Provide, inject +from product_analysis_module.models.product_analysis import ( + CreateProductAnalysisPayload, + ProductAnalysis, +) +from user_management_module.utils.user_utils import get_current_user, check_is_admin +from datetime import 
datetime +from product_analysis_module.product_analysis_container import ProductAnalysisContainer +from product_analysis_module.product_analysis_service import ProductAnalysisService + + +product_analysis_router = APIRouter(prefix='/v1') + + +@product_analysis_router.post('/product-analysis') +@inject +async def create_product_analysis( + request: Request, + payload: CreateProductAnalysisPayload, + product_analysis_service: ProductAnalysisService = Depends( + Provide[ProductAnalysisContainer.product_analysis_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + This endpoint is used to create a product analysis event. + """ + + user_role, user_id, session_id = get_current_user(request) + if not user_id: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse('Access denied'), + ) + + # Create ProductAnalysis object from the payload with server-added fields + product_analysis = ProductAnalysis( + event_name=payload.event_name, + type=payload.type, + sub_type=payload.sub_type, + category=payload.category, + sub_category=payload.sub_category, + action=payload.action, + action_type=payload.action_type, + page=payload.page, + page_path=payload.page_path, + matadata=payload.matadata, + user_id=user_id, + user_role=user_role, + session_id=session_id, + created_at=datetime.now(), + ) + + await product_analysis_service.create_product_analysis(product_analysis) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + {'message': 'The event has been logged successfully'} + ), + ) + + +@product_analysis_router.get('/product-analysis') +@inject +async def get_product_analysis( + request: Request, + product_analysis_service: ProductAnalysisService = Depends( + Provide[ProductAnalysisContainer.product_analysis_service] + ), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + This endpoint is used to get the product analysis events if the user is an admin. 
+ """ + user_role_id, user_id, _ = get_current_user(request) + user_role = await check_is_admin(user_role_id) + + if not user_id or not user_role: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse('Access denied'), + ) + + product_analysis = await product_analysis_service.get_product_analysis() + product_analysis_response = [item.to_dict() for item in product_analysis] + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'product_analysis': product_analysis_response} + ), + ) diff --git a/wavefront/server/modules/product_analysis_module/product_analysis_module/models/product_analysis.py b/wavefront/server/modules/product_analysis_module/product_analysis_module/models/product_analysis.py new file mode 100644 index 00000000..f55f7ae3 --- /dev/null +++ b/wavefront/server/modules/product_analysis_module/product_analysis_module/models/product_analysis.py @@ -0,0 +1,37 @@ +from pydantic import BaseModel +from datetime import datetime +from typing import Optional, Dict, Any + + +class CreateProductAnalysisPayload(BaseModel): + """Payload sent by the user - only these fields are provided by the client""" + + event_name: str + type: Optional[str] = None + sub_type: Optional[str] = None + category: Optional[str] = None + sub_category: Optional[str] = None + action: Optional[str] = None + action_type: Optional[str] = None + page: str + page_path: str + matadata: Optional[Dict[str, Any]] = None + + +class ProductAnalysis(BaseModel): + """Complete product analysis model with all fields including server-added ones""" + + event_name: str + type: Optional[str] = None + sub_type: Optional[str] = None + category: Optional[str] = None + sub_category: Optional[str] = None + action: Optional[str] = None + action_type: Optional[str] = None + page: str + page_path: str + matadata: Optional[Dict[str, Any]] = None + user_id: str + session_id: str + user_role: str + created_at: datetime diff --git a/wavefront/server/modules/product_analysis_module/product_analysis_module/product_analysis_container.py b/wavefront/server/modules/product_analysis_module/product_analysis_module/product_analysis_container.py new file mode 100644 index 00000000..e340bf11 --- /dev/null +++ b/wavefront/server/modules/product_analysis_module/product_analysis_module/product_analysis_container.py @@ -0,0 +1,7 @@ +from dependency_injector import containers +from dependency_injector import providers +from product_analysis_module.product_analysis_service import ProductAnalysisService + + +class ProductAnalysisContainer(containers.DeclarativeContainer): + product_analysis_service = providers.Singleton(ProductAnalysisService) diff --git a/wavefront/server/modules/product_analysis_module/product_analysis_module/product_analysis_service.py b/wavefront/server/modules/product_analysis_module/product_analysis_module/product_analysis_service.py new file mode 100644 index 00000000..5c8be2b0 --- /dev/null +++ b/wavefront/server/modules/product_analysis_module/product_analysis_module/product_analysis_service.py @@ -0,0 +1,39 @@ +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from db_repo_module.models.product_analytics import ProductAnalytics +from db_repo_module.db_repo_container import DatabaseModuleContainer +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import Depends +from product_analysis_module.models.product_analysis import 
ProductAnalysis + + +class ProductAnalysisService: + @inject + def __init__( + self, + product_analysis_repository: SQLAlchemyRepository[ProductAnalytics] = Depends( + Provide[DatabaseModuleContainer.product_analytics_repository] + ), + ): + self.product_analysis_repository = product_analysis_repository + + async def create_product_analysis(self, payload: ProductAnalysis): + await self.product_analysis_repository.create( + event_name=payload.event_name, + type=payload.type, + sub_type=payload.sub_type, + category=payload.category, + sub_category=payload.sub_category, + action=payload.action, + action_type=payload.action_type, + page=payload.page, + page_path=payload.page_path, + matadata=payload.matadata, + user_id=payload.user_id, + session_id=payload.session_id, + user_role=payload.user_role, + created_at=payload.created_at, + ) + + async def get_product_analysis(self): + return await self.product_analysis_repository.find() diff --git a/wavefront/server/modules/product_analysis_module/pyproject.toml b/wavefront/server/modules/product_analysis_module/pyproject.toml new file mode 100644 index 00000000..97254da4 --- /dev/null +++ b/wavefront/server/modules/product_analysis_module/pyproject.toml @@ -0,0 +1,30 @@ +[project] +name = "product-analysis-module" +version = "0.1.0" +description = "" +authors = [ + { name = "rootflo engineering", email = "engineering@rootflo.ai" } +] + +requires-python = ">=3.11" +dependencies = [ + "common-module", + "db-repo-module", +] + +[tool.pytest.ini_options] +asyncio_mode = "auto" + +[tool.uv] +package = true + +[tool.uv.sources] +common-module = { workspace = true } +db-repo-module = { workspace = true } + +[build-system] +requires = ["poetry-core>=2.0.0,<3.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.hatch.build.targets.wheel] +packages = ["product_analysis_module"] diff --git a/wavefront/server/modules/product_analysis_module/tests/conftest.py b/wavefront/server/modules/product_analysis_module/tests/conftest.py new file mode 100644 index 00000000..7a78476f --- /dev/null +++ b/wavefront/server/modules/product_analysis_module/tests/conftest.py @@ -0,0 +1,219 @@ +import json +from unittest.mock import Mock +from uuid import uuid4 + +from auth_module.auth_container import AuthContainer +from common_module.common_container import CommonContainer +from common_module.middleware.request_id_middleware import RequestIdMiddleware +from db_repo_module.database.base import Base +from db_repo_module.db_repo_container import DatabaseModuleContainer +from fastapi import FastAPI +from fastapi.testclient import TestClient +import pytest +from sqlalchemy.ext.asyncio import async_sessionmaker +from sqlalchemy.ext.asyncio import create_async_engine +import testing.postgresql +from user_management_module.authorization.require_auth import RequireAuthMiddleware +from user_management_module.user_container import UserContainer +from product_analysis_module.controllers.product_anaysis_controllers import ( + product_analysis_router, +) +from product_analysis_module.product_analysis_container import ProductAnalysisContainer + + +class MockDbClient: + def __init__(self, engine, session_factory): + self._engine = engine + self.session = session_factory + + +@pytest.fixture +async def test_engine(): + with testing.postgresql.Postgresql() as postgresql: + database_url = postgresql.url() + + async_database_url = database_url.replace( + 'postgresql://', 'postgresql+psycopg://' + ) + + engine = create_async_engine(async_database_url) + + async with engine.begin() as conn: + 
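+            # Create all mapped tables before the tests run; the mirror drop_all below tears the schema down afterwards.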
await conn.run_sync(Base.metadata.create_all) + + yield engine + + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + await engine.dispose() + + +@pytest.fixture +async def test_session(test_engine): + async_session = async_sessionmaker(autocommit=False, bind=test_engine) + yield async_session + + +@pytest.fixture +def test_user_id(): + """Fixture to provide a consistent test user ID.""" + return str(uuid4()) + + +@pytest.fixture +def test_session_id(): + """Fixture to provide a consistent test session ID.""" + return str(uuid4()) + + +@pytest.fixture +def setup_containers(test_engine, test_session, test_user_id, test_session_id): + db_repo_container = DatabaseModuleContainer() + mock_db_client = MockDbClient(test_engine, test_session) + db_repo_container.db_client.override(mock_db_client) + user_container = UserContainer() + product_analysis_container = ProductAnalysisContainer() + + common_container = CommonContainer() + + cache_manager_mock = Mock() + # For session data + cache_manager_mock.get_str.return_value = json.dumps( + {'user_id': test_user_id, 'device_info': 'Mozilla/5.0'} + ) + # For reset password + cache_manager_mock.get_str.side_effect = ( + lambda key: test_user_id + if key == 'mock_reset_code' + else json.dumps({'user_id': test_user_id, 'device_info': 'Mozilla/5.0'}) + ) + cache_manager_mock.add = Mock() + common_container.cache_manager.override(cache_manager_mock) + + user_container.db_client.override(mock_db_client) + user_container.cache_manager.override(cache_manager_mock) + + # Mock token service + mock_token_service = Mock() + mock_token_service.create_token.return_value = 'mock_token' + mock_token_service.decode_token.return_value = { + 'sub': 'test@example.com', + 'user_id': test_user_id, + 'role_id': 'test_role_id', + 'session_id': test_session_id, + 'code': 'mock_reset_code', + } + mock_token_service.token_expiry = 3600 + mock_token_service.temporary_token_expiry = 600 + + auth_container = AuthContainer( + db_client=db_repo_container.db_client, + cache_manager=cache_manager_mock, + ) + auth_container.token_service.override(mock_token_service) + + # mocking auth container superset_service + mock_superset_service = Mock() + mock_superset_service.generate_guest_token.return_value = 'mock_guest_token' + auth_container.superset_service.override(mock_superset_service) + + user_container.wire( + packages=[ + 'user_management_module.authorization', + 'user_management_module.utils', + 'auth_module.controllers', + ] + ) + + product_analysis_container.wire( + packages=[ + 'product_analysis_module.controllers', + ] + ) + + # Wire the database repository to the product analysis service + db_repo_container.wire( + modules=[__name__], + packages=['product_analysis_module.product_analysis_service'], + ) + + common_container.wire( + packages=[ + 'auth_module.controllers', + 'user_management_module.authorization', + 'product_analysis_module.controllers', + ] + ) + auth_container.wire( + packages=[ + 'user_management_module.authorization', + ] + ) + + yield auth_container, common_container, product_analysis_container + auth_container.unwire() + common_container.unwire() + product_analysis_container.unwire() + db_repo_container.unwire() + + +@pytest.fixture +def test_client(setup_containers): + app = FastAPI() + app.add_middleware(RequestIdMiddleware) + app.add_middleware(RequireAuthMiddleware) + app.include_router(product_analysis_router, prefix='/floware') + return TestClient(app) + + +@pytest.fixture +def mock_auth_functions(monkeypatch): + async 
def mock_get_current_user(request): + return 'test_user_id', 'test_role_id', 'test_session_id' + + async def mock_check_is_admin(role_id): + return True + + monkeypatch.setattr( + 'user_management_module.controllers.user_controller.get_current_user', + mock_get_current_user, + ) + + +@pytest.fixture +def mock_admin_functions(monkeypatch): + """Mock check_is_admin to return True for admin tests""" + + async def mock_check_is_admin(role_id, role_repository=None): + return True + + monkeypatch.setattr( + 'product_analysis_module.controllers.product_anaysis_controllers.check_is_admin', + mock_check_is_admin, + ) + + +@pytest.fixture +def mock_non_admin_functions(monkeypatch): + """Mock check_is_admin to return False for non-admin tests""" + + async def mock_check_is_admin(role_id, role_repository=None): + return False + + monkeypatch.setattr( + 'user_management_module.utils.user_utils.check_is_admin', + mock_check_is_admin, + ) + + +@pytest.fixture +def auth_token(setup_containers, test_user_id, test_session_id): + auth_container, _, _ = setup_containers + token_service = auth_container.token_service() + token = token_service.create_token( + sub='test@example.com', + user_id=test_user_id, + role_id='test_role_id', + session_id=test_session_id, + ) + return token diff --git a/wavefront/server/modules/product_analysis_module/tests/test_product_analysis_controllers.py b/wavefront/server/modules/product_analysis_module/tests/test_product_analysis_controllers.py new file mode 100644 index 00000000..07fa71c8 --- /dev/null +++ b/wavefront/server/modules/product_analysis_module/tests/test_product_analysis_controllers.py @@ -0,0 +1,371 @@ +from db_repo_module.models.session import Session +from db_repo_module.models.user import User +import pytest +from sqlalchemy.ext.asyncio import AsyncSession + + +async def create_session(test_session: AsyncSession, test_user_id, test_session_id): + user = User( + id=test_user_id, + email='test@example.com', + password='hashed_password', + first_name='Test', + last_name='User', + ) + + # Create a session in the database + db_session = Session( + id=test_session_id, user_id=test_user_id, device_info='test_device' + ) + + async with test_session() as session: + session.add(user) + session.add(db_session) + await session.commit() + + +@pytest.fixture +def payload(): + return { + 'event_name': 'button_click', + 'type': 'interaction', + 'sub_type': 'cta_click', + 'category': 'user_engagement', + 'sub_category': 'homepage', + 'action': 'click', + 'action_type': 'primary', + 'page': 'home', + 'page_path': '/home', + 'matadata': { + 'button_id': 'signup-btn', + 'timestamp': '2025-08-11T15:45:00Z', + 'device': 'desktop', + 'browser': 'Chrome', + 'experiment_variant': 'A', + }, + } + + +@pytest.mark.asyncio +async def test_post_product_analysis( + test_client, + test_session: AsyncSession, + test_user_id, + test_session_id, + auth_token, + payload, +): + await create_session(test_session, test_user_id, test_session_id) + response = test_client.post( + '/floware/v1/product-analysis', + json=payload, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + print(response.json()) + assert response.status_code == 201 + + +@pytest.mark.asyncio +async def test_post_product_analysis_invalid_token( + test_client, + test_session: AsyncSession, + test_user_id, + test_session_id, + payload, + setup_containers, +): + # override the token service + auth_container, _, _ = setup_containers + token_service = auth_container.token_service() + token_service.decode_token.return_value = {} + 
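+    # An empty decoded payload makes the mocked token service report the bearer token as invalid, so the request should be rejected with 401.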
auth_container.token_service.override(token_service) + + await create_session(test_session, test_user_id, test_session_id) + response = test_client.post( + '/floware/v1/product-analysis', + json=payload, + headers={'Authorization': 'Bearer 12323'}, + ) + print(response.json()) + assert response.status_code == 401 + + +@pytest.mark.asyncio +async def test_post_product_analysis_with_invalid_payload( + test_client, + test_session: AsyncSession, + test_user_id, + test_session_id, + payload, + setup_containers, +): + await create_session(test_session, test_user_id, test_session_id) + response = test_client.post( + '/floware/v1/product-analysis', + json={ + 'event_name': 'button_click', + }, + headers={'Authorization': 'Bearer 12323'}, + ) + assert response.status_code == 422 + + +@pytest.mark.asyncio +async def test_post_product_analysis_with_valid_payload( + test_client, + test_session: AsyncSession, + test_user_id, + test_session_id, + payload, + setup_containers, +): + await create_session(test_session, test_user_id, test_session_id) + response = test_client.post( + '/floware/v1/product-analysis', + json=payload, + headers={'Authorization': 'Bearer 12323'}, + ) + assert response.status_code == 201 + + +@pytest.mark.asyncio +async def test_post_product_analysis_with_minimal_payload( + test_client, + test_session: AsyncSession, + test_user_id, + test_session_id, + setup_containers, +): + """Test POST endpoint with minimal required payload""" + await create_session(test_session, test_user_id, test_session_id) + + minimal_payload = {'event_name': 'page_view', 'page': 'home', 'page_path': '/home'} + + response = test_client.post( + '/floware/v1/product-analysis', + json=minimal_payload, + headers={'Authorization': 'Bearer 12323'}, + ) + assert response.status_code == 201 + + +@pytest.mark.asyncio +async def test_post_product_analysis_with_null_optional_fields( + test_client, + test_session: AsyncSession, + test_user_id, + test_session_id, + setup_containers, +): + """Test POST endpoint with null values for optional fields""" + await create_session(test_session, test_user_id, test_session_id) + + payload_with_nulls = { + 'event_name': 'button_click', + 'type': None, + 'sub_type': None, + 'category': None, + 'sub_category': None, + 'action': None, + 'action_type': None, + 'page': 'home', + 'page_path': '/home', + 'matadata': None, + } + + response = test_client.post( + '/floware/v1/product-analysis', + json=payload_with_nulls, + headers={'Authorization': 'Bearer 12323'}, + ) + assert response.status_code == 201 + + +@pytest.mark.asyncio +async def test_post_product_analysis_with_special_characters( + test_client, + test_session: AsyncSession, + test_user_id, + test_session_id, + setup_containers, +): + """Test POST endpoint with special characters in event data""" + await create_session(test_session, test_user_id, test_session_id) + + special_char_payload = { + 'event_name': 'form_submit_&_validate', + 'type': 'interaction', + 'sub_type': 'form_complete', + 'category': 'conversion', + 'sub_category': 'signup_flow', + 'action': 'submit', + 'action_type': 'primary', + 'page': 'signup-page', + 'page_path': '/signup?utm_source=google&utm_medium=cpc', + 'matadata': { + 'form_name': 'user_registration_form', + 'special_chars': 'test@example.com', + 'unicode_text': 'cafรฉ rรฉsumรฉ naรฏve', + 'html_entities': "<script>alert('test')</script>", + }, + } + + response = test_client.post( + '/floware/v1/product-analysis', + json=special_char_payload, + headers={'Authorization': 'Bearer 12323'}, + ) + assert 
response.status_code == 201 + + +@pytest.mark.asyncio +async def test_post_product_analysis_with_large_metadata( + test_client, + test_session: AsyncSession, + test_user_id, + test_session_id, + setup_containers, +): + """Test POST endpoint with large metadata payload""" + await create_session(test_session, test_user_id, test_session_id) + + large_metadata = { + 'event_name': 'user_session', + 'type': 'session', + 'sub_type': 'session_start', + 'category': 'user_engagement', + 'sub_category': 'session_tracking', + 'action': 'start', + 'action_type': 'automatic', + 'page': 'dashboard', + 'page_path': '/dashboard', + 'matadata': { + 'session_id': 'sess_' + 'x' * 100, + 'user_preferences': { + 'theme': 'dark', + 'language': 'en-US', + 'timezone': 'America/New_York', + 'notifications': {'email': True, 'push': False, 'sms': True}, + }, + 'browser_details': { + 'user_agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', + 'language': 'en-US,en;q=0.9', + 'platform': 'Win32', + 'cookie_enabled': True, + 'do_not_track': False, + }, + 'performance_metrics': { + 'page_load_time': 1250, + 'dom_content_loaded': 850, + 'first_contentful_paint': 1200, + 'largest_contentful_paint': 1800, + 'cumulative_layout_shift': 0.05, + }, + 'analytics_data': { + 'ga_client_id': '123456789.1234567890', + 'gtm_container_id': 'GTM-XXXXXXX', + 'custom_dimensions': { + 'cd1': 'premium_user', + 'cd2': 'mobile_device', + 'cd3': 'returning_visitor', + }, + }, + }, + } + + response = test_client.post( + '/floware/v1/product-analysis', + json=large_metadata, + headers={'Authorization': 'Bearer 12323'}, + ) + assert response.status_code == 201 + + +@pytest.mark.asyncio +async def test_get_product_analysis_as_admin( + test_client, + test_session: AsyncSession, + test_user_id, + test_session_id, + auth_token, + mock_admin_functions, +): + await create_session(test_session, test_user_id, test_session_id) + + response = test_client.get( + '/floware/v1/product-analysis', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == 200 + + +@pytest.mark.asyncio +async def test_get_product_analysis_unauthorized( + test_client, test_session: AsyncSession, test_user_id, test_session_id +): + """Test GET endpoint without authorization token""" + await create_session(test_session, test_user_id, test_session_id) + + response = test_client.get('/floware/v1/product-analysis') + + assert response.status_code == 401 + + +@pytest.mark.asyncio +async def test_get_product_analysis_invalid_token( + test_client, + test_session: AsyncSession, + test_user_id, + test_session_id, + setup_containers, +): + """Test GET endpoint with invalid authorization token""" + await create_session(test_session, test_user_id, test_session_id) + + # Override the token service to return invalid token + auth_container, _, _ = setup_containers + token_service = auth_container.token_service() + token_service.decode_token.return_value = {} + auth_container.token_service.override(token_service) + + response = test_client.get( + '/floware/v1/product-analysis', + headers={'Authorization': 'Bearer invalid_token_123'}, + ) + + assert response.status_code == 401 + + +@pytest.mark.asyncio +async def test_get_product_analysis_with_query_params( + test_client, + test_session: AsyncSession, + test_user_id, + test_session_id, + auth_token, + mock_admin_functions, +): + """Test GET endpoint with query parameters for filtering""" + await create_session(test_session, test_user_id, 
test_session_id) + + # Test with various query parameters + query_params = { + 'page': 1, + 'size': 10, + 'event_name': 'button_click', + 'page_path': '/home', + 'start_date': '2025-01-01', + 'end_date': '2025-12-31', + } + + response = test_client.get( + '/floware/v1/product-analysis', + params=query_params, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == 200 + # Verify response structure + response_data = response.json() + assert isinstance(response_data, dict) diff --git a/wavefront/server/modules/tools_module/pyproject.toml b/wavefront/server/modules/tools_module/pyproject.toml new file mode 100644 index 00000000..dc8b22b9 --- /dev/null +++ b/wavefront/server/modules/tools_module/pyproject.toml @@ -0,0 +1,40 @@ +[project] +name = "tools_module" +version = "0.1.0" +description = "Tools module for Flo AI agent system" +dependencies = [ + "flo-ai>=1.1.0-rc5", + "flo_cloud", + + "datasource", + "common_module", + "plugins-module", + "knowledge-base-module" +] + +[tool.uv.sources] +flo-cloud = { workspace = true } +common-module = { workspace = true } +plugins-module = { workspace = true } +datasource = { workspace = true } +knowledge-base-module = { workspace = true } + +[dependency-groups] +dev = [ + "pytest>=8.3.3,<9.0.0", + "pytest-asyncio>=0.24.0,<1.0.0", +] + +[tool.pytest.ini_options] +asyncio_mode = "auto" + +[tool.uv] +package = true + + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["tools_module"] diff --git a/wavefront/server/modules/tools_module/tools_module/available_tools.json b/wavefront/server/modules/tools_module/tools_module/available_tools.json new file mode 100644 index 00000000..d28cdeec --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/available_tools.json @@ -0,0 +1,161 @@ +{ + "bigquery_test_connection": { + "name": "bigquery_test_connection", + "description": "Test BigQuery connection using the configured datasource. Returns a descriptive string indicating success or failure with connection details.", + "parameters": { + "datasource_id": { + "type": "string", + "description": "UUID of the configured BigQuery datasource" + } + }, + "prefill_values": [ + "datasource_id" + ], + "category": "datasource" + }, + "bigquery_get_schema": { + "name": "bigquery_get_schema", + "description": "Get BigQuery dataset schema information including table structures. Returns formatted text showing tables and their column details.", + "parameters": { + "datasource_id": { + "type": "string", + "description": "UUID of the configured BigQuery datasource" + }, + "table_id": { + "type": "string", + "description": "Table id of the table" + } + }, + "prefill_values": [ + "datasource_id" + ], + "required": [ + "table_id" + ], + "category": "datasource" + }, + "bigquery_get_table_names": { + "name": "bigquery_get_table_names", + "description": "Get list of table names from BigQuery dataset. Returns formatted text with numbered list of tables and count.", + "parameters": { + "datasource_id": { + "type": "string", + "description": "UUID of the configured BigQuery datasource" + } + }, + "prefill_values": [ + "datasource_id" + ], + "category": "datasource" + }, + "bigquery_insert_rows": { + "name": "bigquery_insert_rows", + "description": "Insert rows of data into a BigQuery table. 
Returns descriptive string confirming insertion success or detailing any errors.",
+    "parameters": {
+      "datasource_id": {
+        "type": "string",
+        "description": "UUID of the configured BigQuery datasource"
+      },
+      "table_name": {
+        "type": "string",
+        "description": "Name of the table to insert data into"
+      },
+      "data": {
+        "type": "array",
+        "description": "Array of objects representing rows to insert",
+        "items": {
+          "type": "TYPE_UNSPECIFIED"
+        }
+      }
+    },
+    "prefill_values": [
+      "datasource_id"
+    ],
+    "required": [
+      "table_name",
+      "data"
+    ],
+    "category": "datasource"
+  },
+  "bigquery_execute_query": {
+    "name": "bigquery_execute_query",
+    "description": "Execute a custom BigQuery SQL query using the configured datasource. Supports both dry runs for validation and actual execution. Returns descriptive string with execution status and job details.",
+    "parameters": {
+      "datasource_id": {
+        "type": "string",
+        "description": "UUID of the configured BigQuery datasource"
+      },
+      "query": {
+        "type": "string",
+        "description": "SQL query to execute"
+      },
+      "use_legacy_sql": {
+        "type": "boolean",
+        "description": "Whether to use legacy SQL syntax (default: false)"
+      },
+      "dry_run": {
+        "type": "boolean",
+        "description": "Whether to perform a dry run for query validation without execution (default: false)"
+      }
+    },
+    "prefill_values": [
+      "datasource_id"
+    ],
+    "required": [
+      "query"
+    ],
+    "category": "datasource"
+  },
+  "querying_knowlegebase": {
+    "name": "querying_knowlegebase",
+    "description": "Query the knowledge base using the configured inference prompt. Returns the retrieved answer or a descriptive error message.",
+    "parameters": {
+      "kb_id": {
+        "type": "string",
+        "description": "UUID of the configured knowledge base"
+      },
+      "inference_id": {
+        "type": "string",
+        "description": "UUID of the knowledge base inference configuration"
+      },
+      "question": {
+        "type": "string",
+        "description": "Question to ask the knowledge base"
+      }
+    },
+    "prefill_values": [
+      "kb_id",
+      "inference_id"
+    ],
+    "required": [
+      "question"
+    ],
+    "category": "knowlegebase"
+  },
+  "send_email": {
+    "name": "send_email",
+    "description": "Send an email to the specified recipient. 
Returns a descriptive string indicating success or failure of the email delivery process.", + "parameters": { + "email_id": { + "type": "string", + "description": "Email address of the recipient to whom the message will be sent" + }, + "email_subject": { + "type": "string", + "description": "Subject of the email" + }, + "email_body": { + "type": "string", + "description": "Body of the email" + } + }, + "prefill_values": [ + "email_id" + ], + "required": [ + "email_subject", + "email_body" + ], + "category": "email" + } +} diff --git a/wavefront/server/modules/tools_module/tools_module/controllers/tools_controller.py b/wavefront/server/modules/tools_module/tools_module/controllers/tools_controller.py new file mode 100644 index 00000000..1a929f72 --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/controllers/tools_controller.py @@ -0,0 +1,185 @@ +from typing import Optional +from common_module.security import bearer_auth +from fastapi import APIRouter, HTTPException, Query, Depends, Security, status +from fastapi.responses import JSONResponse +from dependency_injector.wiring import Provide, inject +from common_module.response_formatter import ResponseFormatter +from common_module.common_container import CommonContainer +from common_module.models.response import GenericResponseModel, DataWrapper +from tools_module.services.tool_service import ToolService +from tools_module.tools_container import ToolsContainer +from tools_module.models.tool_schemas import ( + ToolsListData, + ToolNamesData, + ToolDetailsData, + ToolMetadataData, + ValidationResultData, + ValidateToolsRequest, +) + +tools_router = APIRouter(prefix='/v1/tools', tags=['Tools']) + + +@tools_router.get( + '/', + response_model=GenericResponseModel[DataWrapper[ToolsListData]], + dependencies=[Security(bearer_auth)], +) +@inject +async def get_all_tools( + category: Optional[str] = Query( + None, description="Filter tools by category (e.g., 'datasource')" + ), + tool_service: ToolService = Depends(Provide[ToolsContainer.tool_service]), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + Get all available tools with their metadata including parameters and descriptions + """ + if category: + tools = tool_service.get_tools_by_category(category) + message = f"Retrieved tools for category '{category}'" + else: + tools = tool_service.get_available_tools() + message = 'Retrieved all available tools' + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'message': message, 'data': {'tools': tools, 'count': len(tools)}} + ), + ) + + +@tools_router.get( + '/names', + response_model=GenericResponseModel[DataWrapper[ToolNamesData]], + dependencies=[Security(bearer_auth)], +) +@inject +async def get_tool_names( + tool_service: ToolService = Depends(Provide[ToolsContainer.tool_service]), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + Get list of available tool names only + """ + tool_names = tool_service.get_tool_names() + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Retrieved available tool names', + 'data': {'tool_names': tool_names, 'count': len(tool_names)}, + } + ), + ) + + +@tools_router.get( + '/tool-details', + response_model=GenericResponseModel[DataWrapper[ToolDetailsData]], + dependencies=[Security(bearer_auth)], +) +@inject +async def get_tool_details( 
+ tool_service: ToolService = Depends(Provide[ToolsContainer.tool_service]), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + Get list of available tool details + """ + tool_details_models = await tool_service.get_all_tool_details() + tool_details = [t.model_dump() for t in tool_details_models] + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Retrieved available tool names', + 'data': {'tool_details': tool_details, 'count': len(tool_details)}, + } + ), + ) + + +@tools_router.get( + '/{tool_name}', + response_model=GenericResponseModel[DataWrapper[ToolMetadataData]], + dependencies=[Security(bearer_auth)], +) +@inject +async def get_tool_by_name( + tool_name: str, + tool_service: ToolService = Depends(Provide[ToolsContainer.tool_service]), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + Get metadata for a specific tool by name + """ + tool_metadata = tool_service.get_tool_metadata(tool_name) + + if not tool_metadata: + raise HTTPException(status_code=404, detail=f"Tool '{tool_name}' not found") + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': f"Retrieved tool metadata for '{tool_name}'", + 'data': {'tool': tool_metadata}, + } + ), + ) + + +@tools_router.post( + '/validate', + response_model=GenericResponseModel[DataWrapper[ValidationResultData]], + dependencies=[Security(bearer_auth)], +) +@inject +async def validate_tools( + request: ValidateToolsRequest, + tool_service: ToolService = Depends(Provide[ToolsContainer.tool_service]), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + """ + Validate that a list of tool names exist + """ + missing_tools = tool_service.validate_tools_exist(request.tool_names) + valid_tools = [name for name in request.tool_names if name not in missing_tools] + all_valid = len(missing_tools) == 0 + + message = ( + 'All tools are valid' + if all_valid + else f'Found {len(missing_tools)} invalid tools' + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': message, + 'data': { + 'validation_result': { + 'valid_tools': valid_tools, + 'missing_tools': missing_tools, + 'all_valid': all_valid, + 'total_checked': len(request.tool_names), + } + }, + } + ), + ) diff --git a/wavefront/server/modules/tools_module/tools_module/datasources/bigquery_tools.py b/wavefront/server/modules/tools_module/tools_module/datasources/bigquery_tools.py new file mode 100644 index 00000000..4cf2ab8f --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/datasources/bigquery_tools.py @@ -0,0 +1,317 @@ +import asyncio +from typing import Any, Dict, List, Optional, cast + +from datasource import BigQueryConfig, DataSourceType +from datasource.bigquery import BigQueryPlugin +from plugins_module.services.datasource_services import get_datasource_config + + +async def bigquery_test_connection(datasource_id: str) -> str: + """Test BigQuery connection using configured datasource""" + try: + # Get datasource config from database + ds_type, config = await get_datasource_config(datasource_id) + + if not config: + return f"โŒ Datasource '{datasource_id}' not found" + + if ds_type != DataSourceType.GCP_BIGQUERY: + return f"โŒ Invalid datasource type '{ds_type}' for BigQuery 
tool" + + bq_config = cast(BigQueryConfig, config) + plugin = BigQueryPlugin(bq_config) + result = plugin.test_connection() + + if result: + return f"โœ… Successfully connected to BigQuery project '{bq_config.project_id}', dataset '{bq_config.dataset_id}' in location '{bq_config.location}'" + else: + return f"โŒ Failed to connect to BigQuery project '{bq_config.project_id}', dataset '{bq_config.dataset_id}' in location '{bq_config.location}'" + except Exception as e: + return f"โŒ Connection error for datasource '{datasource_id}': {str(e)}" + + +async def bigquery_get_schema(datasource_id: str, table_id: str) -> str: + """Get BigQuery dataset schema information using configured datasource""" + try: + # Get datasource config from database + ds_type, config = await get_datasource_config(datasource_id) + + if not config: + return f"โŒ Datasource '{datasource_id}' not found" + + if ds_type != DataSourceType.GCP_BIGQUERY: + return f"โŒ Invalid datasource type '{ds_type}' for BigQuery tool" + + bq_config = cast(BigQueryConfig, config) + plugin = BigQueryPlugin(bq_config) + schema_info = plugin.get_schema(table_id) + + if not schema_info: + return f"No schema information available for dataset '{bq_config.dataset_id}' in project '{bq_config.project_id}'" + + # Format schema information as readable text + result = f"๐Ÿ“Š Schema for dataset '{bq_config.dataset_id}' in project '{bq_config.project_id}':\n\n" + + result += str(schema_info) + + return result + except Exception as e: + return f"โŒ Error retrieving schema for datasource '{datasource_id}': {str(e)}" + + +async def bigquery_get_table_names(datasource_id: str) -> str: + """Get list of table names from BigQuery dataset using configured datasource""" + try: + # Get datasource config from database + ds_type, config = await get_datasource_config(datasource_id) + + if not config: + return f"โŒ Datasource '{datasource_id}' not found" + + if ds_type != DataSourceType.GCP_BIGQUERY: + return f"โŒ Invalid datasource type '{ds_type}' for BigQuery tool" + + bq_config = cast(BigQueryConfig, config) + plugin = BigQueryPlugin(bq_config) + table_names = plugin.get_table_names() + + if not table_names: + return f"No tables found in dataset '{bq_config.dataset_id}' in project '{bq_config.project_id}'" + + result = f"๐Ÿ“‹ Found {len(table_names)} table(s) in dataset '{bq_config.dataset_id}' (project '{bq_config.project_id}'):\n\n" + + for i, table_name in enumerate(table_names, 1): + result += f'{i}. 
{table_name}\n' + + return result + except Exception as e: + return f"โŒ Error retrieving table names for datasource '{datasource_id}': {str(e)}" + + +async def bigquery_fetch_data( + datasource_id: str, + table_names: List[str], + projection: str = '*', + where_clause: str = 'true', + join_query: Optional[str] = None, + params: Optional[Dict[str, Any]] = None, + offset: int = 0, + limit: int = 1000, + order_by: Optional[str] = None, + group_by: Optional[str] = None, +) -> str: + """Fetch data from BigQuery tables using configured datasource with optional filtering and joins""" + try: + # Get datasource config from database + ds_type, config = await get_datasource_config(datasource_id) + + if not config: + return f"โŒ Datasource '{datasource_id}' not found" + + if ds_type != DataSourceType.GCP_BIGQUERY: + return f"โŒ Invalid datasource type '{ds_type}' for BigQuery tool" + + bq_config = cast(BigQueryConfig, config) + plugin = BigQueryPlugin(bq_config) + results = plugin.fetch_data( + table_names=table_names, + projection=projection, + where_clause=where_clause, + join_query=join_query, + params=params, + offset=offset, + limit=limit, + order_by=order_by, + group_by=group_by, + ) + + if not results: + table_list = ', '.join(table_names) + return ( + f"No data found in table(s) '{table_list}' matching the query criteria" + ) + + table_list = ', '.join(table_names) + result = f"๐Ÿ“Š Retrieved {len(results)} record(s) from table(s) '{table_list}' in dataset '{bq_config.dataset_id}':\n\n" + + # Show first few records + display_limit = min(5, len(results)) + + if results and isinstance(results[0], dict): + # Get column headers from first record + headers = list(results[0].keys()) + result += 'Columns: ' + ', '.join(headers) + '\n\n' + + for i, row in enumerate(results[:display_limit], 1): + result += f'Record {i}:\n' + for key, value in row.items(): + # Truncate long values + str_value = str(value) + if len(str_value) > 100: + str_value = str_value[:100] + '...' + result += f' {key}: {str_value}\n' + result += '\n' + + if len(results) > display_limit: + result += f'... 
and {len(results) - display_limit} more record(s)\n' + + # Add query details + result += '\nQuery details:\n' + result += f' Projection: {projection}\n' + result += f' Where: {where_clause}\n' + result += f' Limit: {limit}\n' + result += f' Offset: {offset}\n' + if order_by: + result += f' Order by: {order_by}\n' + + return result + except Exception as e: + table_list = ', '.join(table_names) + return f"โŒ Error fetching data from table(s) '{table_list}' for datasource '{datasource_id}': {str(e)}" + + +async def bigquery_insert_rows( + datasource_id: str, table_name: str, data: List[Dict[str, Any]] +) -> str: + """Insert rows into BigQuery table using configured datasource""" + try: + # Get datasource config from database + ds_type, config = await get_datasource_config(datasource_id) + + if not config: + return f"โŒ Datasource '{datasource_id}' not found" + + if ds_type != DataSourceType.GCP_BIGQUERY: + return f"โŒ Invalid datasource type '{ds_type}' for BigQuery tool" + bq_config = cast(BigQueryConfig, config) + plugin = BigQueryPlugin(bq_config) + + if not data: + return f"No data provided for insertion into table '{table_name}'" + + result = plugin.insert_rows_json(table_name, data) + + # Check if insertion was successful + if result is None or (isinstance(result, list) and len(result) == 0): + # Success case (empty errors list means success) + return f"โœ… Successfully inserted {len(data)} record(s) into table '{table_name}' in dataset '{bq_config.dataset_id}' (project '{bq_config.project_id}')" + else: + # Handle error cases + if isinstance(result, list) and len(result) > 0: + error_details = '; '.join( + [str(error) for error in result[:3]] + ) # Show first 3 errors + more_errors = f' and {len(result) - 3} more' if len(result) > 3 else '' + return f"โŒ Failed to insert some records into table '{table_name}': {error_details}{more_errors}" + else: + return f'โš ๏ธ Insertion completed with result: {str(result)}' + + except Exception as e: + return f"โŒ Error inserting records for datasource '{datasource_id}', table '{table_name}': {str(e)}" + + +async def bigquery_execute_query( + datasource_id: str, query: str, use_legacy_sql: bool = False, dry_run: bool = False +) -> str: + """Execute a BigQuery SQL query using configured datasource""" + try: + # Get datasource config from database + ds_type, config = await get_datasource_config(datasource_id) + + if not config: + return f"Datasource '{datasource_id}' not found" + + if ds_type != DataSourceType.GCP_BIGQUERY: + return f"Invalid datasource type '{ds_type}' for BigQuery tool" + + bq_config = cast(BigQueryConfig, config) + plugin = BigQueryPlugin(bq_config) + + if not query or not query.strip(): + return 'โŒ No query provided for execution' + + result = await plugin.execute_query( + query, use_legacy_sql=use_legacy_sql, dry_run=dry_run + ) + + if dry_run: + # For dry run, return query validation info + return f"โœ… Query validation successful for project '{bq_config.project_id}'. Query is valid and ready to execute." 
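+        # Non-dry-run path: run the blocking job result fetch in a worker thread and format the returned rows for display.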
+ else: + # For actual execution, get results and format response + job_id = getattr(result, 'job_id', 'unknown') + + try: + # Get query results if available + # result.result() is a blocking call, so run it in a thread pool + query_results = await asyncio.to_thread( + lambda: list(result.result()) + ) # Convert iterator to list + + if query_results: + # Format results similar to bigquery_fetch_data + response = f"โœ… Query executed successfully in project '{bq_config.project_id}' (Job ID: {job_id})\n\n" + response += f'๐Ÿ“Š Retrieved {len(query_results)} record(s):\n\n' + + # Show first few records + display_limit = min(5, len(query_results)) + + if query_results and len(query_results[0]) > 0: + # Get column headers from first record + headers = ( + list(query_results[0].keys()) + if hasattr(query_results[0], 'keys') + else [f'col_{i}' for i in range(len(query_results[0]))] + ) + response += 'Columns: ' + ', '.join(headers) + '\n\n' + + for i, row in enumerate(query_results[:display_limit], 1): + response += f'Record {i}:\n' + if hasattr(row, 'items'): # Row object with key-value pairs + for key, value in row.items(): + str_value = str(value) + if len(str_value) > 100: + str_value = str_value[:100] + '...' + response += f' {key}: {str_value}\n' + else: # Simple tuple/list row + for j, value in enumerate(row): + str_value = str(value) + if len(str_value) > 100: + str_value = str_value[:100] + '...' + response += f" {headers[j] if j < len(headers) else f'col_{j}'}: {str_value}\n" + response += '\n' + + if len(query_results) > display_limit: + response += f'... and {len(query_results) - display_limit} more record(s)\n' + + return response + else: + # No results (e.g., INSERT, UPDATE, DELETE, DDL statements) + num_affected = getattr(result, 'num_dml_affected_rows', None) + if num_affected is not None: + return f"โœ… Query executed successfully in project '{bq_config.project_id}' (Job ID: {job_id})\nAffected rows: {num_affected}" + else: + return f"โœ… Query executed successfully in project '{bq_config.project_id}' (Job ID: {job_id})" + + except Exception as result_error: + # If we can't get results, just return success with job info + return f"โœ… Query executed successfully in project '{bq_config.project_id}' (Job ID: {job_id})\nโš ๏ธ Could not retrieve results: {str(result_error)}" + + except Exception as e: + error_msg = str(e).lower() + + # Enhanced error handling for common BigQuery issues + if 'table' in error_msg and 'not found' in error_msg: + return f"โŒ Table not found. Please check that the table exists in dataset '{bq_config.dataset_id}' of project '{bq_config.project_id}'. Error: {str(e)}" + elif 'dataset' in error_msg and 'not found' in error_msg: + return f"โŒ Dataset '{bq_config.dataset_id}' not found in project '{bq_config.project_id}'. Please check your datasource configuration. Error: {str(e)}" + elif 'permission' in error_msg or 'access' in error_msg: + return f"โŒ Permission denied. Please check your BigQuery credentials and access rights to project '{bq_config.project_id}' and dataset '{bq_config.dataset_id}'. Error: {str(e)}" + elif 'syntax error' in error_msg or 'invalid query' in error_msg: + return f"โŒ SQL syntax error. Please check your query syntax. Note: Table names are automatically qualified with dataset '{bq_config.dataset_id}'. Error: {str(e)}" + elif 'quota' in error_msg or 'exceeded' in error_msg: + return f'โŒ BigQuery quota or limits exceeded. Please try again later or contact your administrator. 
Error: {str(e)}'
+        else:
+            return (
+                f"โŒ Error executing query for datasource '{datasource_id}': {str(e)}"
+            )
diff --git a/wavefront/server/modules/tools_module/tools_module/datasources/provider.py b/wavefront/server/modules/tools_module/tools_module/datasources/provider.py
new file mode 100644
index 00000000..b7e2c996
--- /dev/null
+++ b/wavefront/server/modules/tools_module/tools_module/datasources/provider.py
@@ -0,0 +1,55 @@
+from typing import List, Dict, Any
+from tools_module.interfaces.tool_details_provider import ToolDetailsProvider
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from db_repo_module.models.datasource import Datasource
+from tools_module.models.tool_schemas import ToolExecutionDetails
+
+
+class DatasourceToolDetailsProvider(ToolDetailsProvider):
+    def __init__(self, datasource_repository: SQLAlchemyRepository[Datasource]):
+        self.datasource_repository = datasource_repository
+
+    def can_handle(self, category: str) -> bool:
+        return category == 'datasource'
+
+    async def get_tool_details(
+        self, tool_metadata: Dict[str, Any]
+    ) -> List[ToolExecutionDetails]:
+        tool_details = []
+        prefill_values = tool_metadata.get('prefill_values', [])
+
+        if 'datasource_id' in prefill_values:
+            all_datasource = await self.datasource_repository.find()
+            all_datasource = [datasource.to_dict() for datasource in all_datasource]
+
+            for datasource in all_datasource:
+                tool_details.append(
+                    ToolExecutionDetails(
+                        name=tool_metadata['name'],
+                        prefill_parameter_names=prefill_values,
+                        prefilled_value={
+                            'datasource_id': datasource['id'],
+                        },
+                        resource_name=datasource['name'],
+                        required=tool_metadata.get('required', []),
+                        parameters=tool_metadata['parameters'],
+                        description=tool_metadata['description'],
+                        category=tool_metadata['category'],
+                    )
+                )
+        else:
+            # Fallback when the tool does not prefill a datasource_id, which is unlikely for this category
+            tool_details.append(
+                ToolExecutionDetails(
+                    name=tool_metadata['name'],
+                    resource_name='',
+                    prefill_parameter_names=prefill_values,
+                    prefilled_value={},
+                    required=tool_metadata.get('required', []),
+                    parameters=tool_metadata['parameters'],
+                    description=tool_metadata['description'],
+                    category=tool_metadata['category'],
+                )
+            )
+
+        return tool_details
diff --git a/wavefront/server/modules/tools_module/tools_module/email/email_tool.py b/wavefront/server/modules/tools_module/tools_module/email/email_tool.py
new file mode 100644
index 00000000..5d4921ad
--- /dev/null
+++ b/wavefront/server/modules/tools_module/tools_module/email/email_tool.py
@@ -0,0 +1,21 @@
+from db_repo_module.db_repo_container import DatabaseModuleContainer
+from user_management_module.user_container import UserContainer
+
+
+async def send_email(email_id: str, email_subject: str, email_body: str):
+    # setting up the containers
+    db_repo_container = DatabaseModuleContainer()
+    user_module_container = UserContainer(
+        db_client=db_repo_container.db_client,
+        cache_manager=db_repo_container.cache_manager,
+    )
+
+    # send the email through the user module's email service
+    email_response = user_module_container.email_service().send_email(
+        subject=email_subject, body=email_body, email_id=email_id
+    )
+    if email_response:
+        return 'The email has been sent successfully to the recipient.'
+
+    else:
+        return 'An error occurred while sending the email. Please verify your email address and try again later.'
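For reference, a minimal sketch of how the send_email tool above could be exercised directly, outside the agent runtime. The recipient address and message text are placeholder values, not part of the patch; send_email resolves its own DI containers internally, so nothing else needs to be wired up:

import asyncio

from tools_module.email.email_tool import send_email


async def main() -> None:
    # Placeholder recipient and content for illustration only.
    result = await send_email(
        email_id='user@example.com',
        email_subject='Wavefront notification',
        email_body='Your workflow run has completed.',
    )
    # send_email returns a human-readable status string rather than raising.
    print(result)


if __name__ == '__main__':
    asyncio.run(main())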
diff --git a/wavefront/server/modules/tools_module/tools_module/interfaces/tool_details_provider.py b/wavefront/server/modules/tools_module/tools_module/interfaces/tool_details_provider.py
new file mode 100644
index 00000000..d0ca0e2a
--- /dev/null
+++ b/wavefront/server/modules/tools_module/tools_module/interfaces/tool_details_provider.py
@@ -0,0 +1,35 @@
+from abc import ABC, abstractmethod
+from typing import List, Dict, Any
+from tools_module.models.tool_schemas import ToolExecutionDetails
+
+
+class ToolDetailsProvider(ABC):
+    """Interface for providing tool details"""
+
+    @abstractmethod
+    async def get_tool_details(
+        self, tool_metadata: Dict[str, Any]
+    ) -> List[ToolExecutionDetails]:
+        """
+        Get details for a specific tool based on its metadata.
+
+        Args:
+            tool_metadata: The metadata of the tool from available_tools.json
+
+        Returns:
+            List of tool details (can be multiple if expanded like datasources)
+        """
+        pass
+
+    @abstractmethod
+    def can_handle(self, category: str) -> bool:
+        """
+        Check if this provider can handle the given tool category.
+
+        Args:
+            category: The category of the tool
+
+        Returns:
+            True if this provider handles the category, False otherwise
+        """
+        pass
diff --git a/wavefront/server/modules/tools_module/tools_module/knowlegebase/knowledge_base_tools.py b/wavefront/server/modules/tools_module/tools_module/knowlegebase/knowledge_base_tools.py
new file mode 100644
index 00000000..593db659
--- /dev/null
+++ b/wavefront/server/modules/tools_module/tools_module/knowlegebase/knowledge_base_tools.py
@@ -0,0 +1,45 @@
+from knowledge_base_module.knowledge_base_container import KnowledgeBaseContainer
+
+from db_repo_module.db_repo_container import DatabaseModuleContainer
+
+
+async def querying_knowlegebase(
+    kb_id: str,
+    inference_id: str,
+    question: str,
+):
+    # Resolve the knowledge base services via the shared database container
+    db_repo_container = DatabaseModuleContainer()
+    db_client = db_repo_container.db_client
+    cache_manager = db_repo_container.cache_manager
+    knowledge_base_container = KnowledgeBaseContainer(
+        db_client=db_client,
+        cache_manager=cache_manager,
+    )
+
+    if not question:
+        return 'Query should not be empty'
+    existing_kb = await knowledge_base_container.knowledge_base_repository().find_one(
+        id=kb_id
+    )
+    if not existing_kb:
+        return 'Knowledge Base with the mentioned id does not exist'
+    existing_inference = (
+        await knowledge_base_container.kb_inference_repository().find_one(
+            knowledge_base_id=kb_id, inference_id=inference_id
+        )
+    )
+    if not existing_inference:
+        return 'Knowledge Base inference with the mentioned knowledge_base_id and inference_id does not exist'
+    else:
+        prompt = existing_inference.inference_content
+        response = await knowledge_base_container.knowledge_base_retrieve().query(
+            question,
+            kb_id,
+            prompt,
+            None,
+            None,
+            None,
+            None,
+        )
+        return response
diff --git a/wavefront/server/modules/tools_module/tools_module/knowlegebase/provider.py b/wavefront/server/modules/tools_module/tools_module/knowlegebase/provider.py
new file mode 100644
index 00000000..8fb5af5d
--- /dev/null
+++ b/wavefront/server/modules/tools_module/tools_module/knowlegebase/provider.py
@@ -0,0 +1,76 @@
+from typing import List, Dict, Any
+from tools_module.interfaces.tool_details_provider import ToolDetailsProvider
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from db_repo_module.models.knowledge_bases import KnowledgeBase
+from db_repo_module.models.kb_inferences import KnowledgeBaseInferences
+from tools_module.models.tool_schemas
diff --git a/wavefront/server/modules/tools_module/tools_module/knowlegebase/provider.py b/wavefront/server/modules/tools_module/tools_module/knowlegebase/provider.py
new file mode 100644
index 00000000..8fb5af5d
--- /dev/null
+++ b/wavefront/server/modules/tools_module/tools_module/knowlegebase/provider.py
@@ -0,0 +1,76 @@
+from typing import List, Dict, Any
+from tools_module.interfaces.tool_details_provider import ToolDetailsProvider
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from db_repo_module.models.knowledge_bases import KnowledgeBase
+from db_repo_module.models.kb_inferences import KnowledgeBaseInferences
+from tools_module.models.tool_schemas import ToolExecutionDetails
+
+
+class KnowledgeBaseToolDetailsProvider(ToolDetailsProvider):
+    def __init__(
+        self,
+        knowledge_base_repository: SQLAlchemyRepository[KnowledgeBase],
+        knowledge_base_inference_repository: SQLAlchemyRepository[
+            KnowledgeBaseInferences
+        ],
+    ):
+        self.knowledge_base_repository = knowledge_base_repository
+        self.knowledge_base_inference_repository = knowledge_base_inference_repository
+
+    def can_handle(self, category: str) -> bool:
+        return (
+            category == 'knowlegebase' or category == 'knowledgebase'
+        )  # Accept both spellings; available_tools.json currently uses 'knowlegebase'
+
+    async def get_tool_details(
+        self, tool_metadata: Dict[str, Any]
+    ) -> List[ToolExecutionDetails]:
+        tool_details = []
+        prefill_values = tool_metadata.get('prefill_values', [])
+
+        if 'kb_id' in prefill_values and 'inference_id' in prefill_values:
+            all_knowledge_bases = await self.knowledge_base_repository.find()
+            all_knowledge_bases = [kb.to_dict() for kb in all_knowledge_bases]
+
+            all_knowledge_base_inferences = (
+                await self.knowledge_base_inference_repository.find()
+            )
+            all_knowledge_base_inferences = [
+                inf.to_dict() for inf in all_knowledge_base_inferences
+            ]
+
+            for kb in all_knowledge_bases:
+                kb_id = str(kb['id'])
+                for inference in all_knowledge_base_inferences:
+                    inference_kb_id = str(inference['knowledge_base_id'])
+                    if inference_kb_id == kb_id:
+                        tool_details.append(
+                            ToolExecutionDetails(
+                                name=tool_metadata['name'],
+                                prefill_parameter_names=prefill_values,
+                                prefilled_value={
+                                    'kb_id': kb_id,
+                                    'inference_id': str(inference['inference_id']),
+                                },
+                                resource_name=kb['name'],
+                                required=tool_metadata.get('required', []),
+                                parameters=tool_metadata['parameters'],
+                                description=tool_metadata['description'],
+                                category=tool_metadata['category'],
+                            )
+                        )
+        else:
+            tool_details.append(
+                ToolExecutionDetails(
+                    name=tool_metadata['name'],
+                    resource_name='',
+                    prefill_parameter_names=prefill_values,
+                    prefilled_value={},
+                    required=tool_metadata.get('required', []),
+                    parameters=tool_metadata['parameters'],
+                    description=tool_metadata['description'],
+                    category=tool_metadata['category'],
+                )
+            )
+
+        return tool_details
diff --git a/wavefront/server/modules/tools_module/tools_module/models/tool_schemas.py b/wavefront/server/modules/tools_module/tools_module/models/tool_schemas.py
new file mode 100644
index 00000000..78e829d8
--- /dev/null
+++ b/wavefront/server/modules/tools_module/tools_module/models/tool_schemas.py
@@ -0,0 +1,114 @@
+"""Pydantic schemas for tools module API responses and requests"""
+
+from typing import Any, Dict, List
+from pydantic import BaseModel, Field
+
+
+# ===== Response Data Models (Inner data field content) =====
+
+
+class ToolDetail(BaseModel):
+    name: str = Field(..., description='Name of the tool')
+    description: str = Field(..., description='Description of what the tool does')
+    category: str = Field(..., description='Category of the tool')
+    parameters: Dict[str, Any] = Field(..., description='Tool parameters schema')
+    required: List[str] = Field(default_factory=list, description='Required parameters')
+    prefill_values: List[str] = Field(
+        default_factory=list, description='Pre-filled parameter values'
+    )
+
+
+class ToolsListData(BaseModel):
+    tools: Dict[str, ToolDetail] = Field(
+        ...,
+        description='Dictionary mapping tool names to their metadata',
+        examples=[
+            {
+                'bigquery_test_connection': {
+                    'name': 'bigquery_test_connection',
+                    'description': 'Test BigQuery connection',
+                    'category': 'datasource',
+                    'parameters': {},
'required': [], + 'prefill_values': [], + } + } + ], + ) + count: int = Field(..., description='Total number of tools', examples=[10]) + + +class ToolNamesData(BaseModel): + tool_names: List[str] = Field( + ..., + description='List of available tool names', + examples=[['bigquery_test_connection', 'bigquery_fetch_data']], + ) + count: int = Field(..., description='Total number of tools', examples=[2]) + + +class ToolExecutionDetails(BaseModel): + name: str = Field(..., description='Name of the tool') + resource_name: str = Field( + default='', description='Name of the resource (e.g. Datasource name)' + ) + prefill_parameter_names: List[str] = Field( + default_factory=list, description='Names of parameters that are pre-filled' + ) + prefilled_value: Dict[str, Any] = Field( + default_factory=dict, + description='Map of parameter names to their pre-filled values', + ) + required: List[str] = Field(default_factory=list, description='Required parameters') + parameters: Dict[str, Any] = Field(..., description='Tool parameters schema') + description: str = Field(..., description='Description of what the tool does') + category: str = Field(..., description='Category of the tool') + + +class ToolDetailsData(BaseModel): + tool_details: List[ToolExecutionDetails] = Field( + ..., description='List of detailed tool information' + ) + count: int = Field(..., description='Total number of tools', examples=[5]) + + +class ToolMetadataData(BaseModel): + tool: ToolDetail = Field( + ..., + description='Metadata for a specific tool', + examples=[ + { + 'name': 'bigquery_test_connection', + 'description': 'Test BigQuery connection', + 'category': 'datasource', + 'parameters': {}, + 'required': [], + 'prefill_values': [], + } + ], + ) + + +class ValidationResult(BaseModel): + valid_tools: List[str] = Field(..., description='List of valid tool names') + missing_tools: List[str] = Field( + ..., description='List of missing/invalid tool names' + ) + all_valid: bool = Field(..., description='Whether all tools are valid') + total_checked: int = Field(..., description='Total number of tools checked') + + +class ValidationResultData(BaseModel): + validation_result: ValidationResult = Field(..., description='Validation results') + + +# ===== Request Models ===== + + +class ValidateToolsRequest(BaseModel): + tool_names: List[str] = Field( + ..., + description='List of tool names to validate', + examples=[['bigquery_test_connection', 'bigquery_fetch_data']], + min_length=1, + ) diff --git a/wavefront/server/modules/tools_module/tools_module/registry/function_node_adapter.py b/wavefront/server/modules/tools_module/tools_module/registry/function_node_adapter.py new file mode 100644 index 00000000..b61a8d21 --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/registry/function_node_adapter.py @@ -0,0 +1,222 @@ +""" +Function Node Adapter + +Provides adapters to make registry functions compatible with function node signatures +without modifying the original function code. + +Function nodes expect this signature: + async def fn( + inputs: List[BaseMessage] = None, + variables: Optional[Dict[str, Any]] = None, + **kwargs, + ) -> str: + +Registry functions have their own signatures (e.g., async def bigquery_test_connection(datasource_id: str) -> str) + +This adapter extracts parameters from inputs/variables and calls the original function. 
+""" + +import json +import inspect +from types import FunctionType +from typing import List, Optional, Dict, Any, Callable, Awaitable +from flo_ai import BaseMessage +from flo_utils.utils.log import logger +from flo_ai import FloUtils + + +def extract_function_params( + inputs: Optional[List[BaseMessage]] = None, + variables: Optional[Dict[str, Any]] = None, + **kwargs, +) -> Dict[str, Any]: + """ + Extract function parameters from inputs and variables. + + Parameters are extracted with this priority (higher priority overrides lower): + 1. kwargs (highest priority) + 2. variables dict + 3. inputs (lowest priority - extracted from last message as JSON) + + Args: + inputs: List of BaseMessage objects, typically the last one contains function params + variables: Dictionary of variables that may contain function parameters + **kwargs: Additional keyword arguments (highest priority) + + Returns: + Dictionary of extracted parameters + """ + params = {} + + if inputs: + last_input = inputs[-1] + if hasattr(last_input, 'content') and isinstance(last_input.content, str): + try: + params.update( + FloUtils.extract_jsons_from_string(last_input.content, strict=True) + ) + except (json.JSONDecodeError, TypeError, ValueError): + raise ValueError( + f'Invalid JSON: {last_input.content}. Function node input must be a JSON object.' + ) + + if variables: + params.update(variables) + + params.update(kwargs) + return params + + +def _build_call_kwargs( + param_names: List[str], + all_params: Dict[str, Any], + kwargs: Dict[str, Any], + excluded_params: set, +) -> Dict[str, Any]: + """Build keyword arguments for calling the original function.""" + call_kwargs = {} + for param_name in param_names: + if param_name in excluded_params: + continue + if param_name in kwargs: + call_kwargs[param_name] = kwargs[param_name] + elif param_name in all_params: + call_kwargs[param_name] = all_params[param_name] + return call_kwargs + + +def _validate_required_params( + sig: inspect.Signature, + call_kwargs: Dict[str, Any], + function_name: str, + excluded_params: set, +) -> None: + """Validate that all required parameters are present.""" + required_params = [ + param_name + for param_name, param in sig.parameters.items() + if param_name not in excluded_params + and param.default == inspect.Parameter.empty + ] + + missing_params = [param for param in required_params if param not in call_kwargs] + if missing_params: + error_msg = ( + f"Function '{function_name}' called with missing required parameters.\n" + f'Missing parameters: {missing_params}.\n' + f'Make sure last message contains all missing parameters as a JSON object.\n' + f'Required parameters: {required_params}.\n' + f'Provided parameters: {list(call_kwargs.keys())}.\n' + ) + logger.error(error_msg) + raise ValueError(error_msg) + + +def _convert_result_to_string(result: Any) -> str: + """Convert function result to string.""" + if result is None: + return '' + if isinstance(result, str): + return result + if isinstance(result, (dict, list)): + return json.dumps(result) + return str(result) + + +def create_function_node_adapter( + original_function: FunctionType, + function_name: str, +) -> Callable[..., Awaitable[str]]: + """ + Create an adapter function that wraps a registry function to work as a function node. + + The adapter: + 1. Accepts the function node signature (inputs, variables, **kwargs) + 2. Extracts parameters from inputs/variables + 3. Calls the original function with the extracted parameters + 4. 
Converts the result to a string + + Args: + original_function: The original registry function to wrap + function_name: Name of the function (for logging/error messages) + + Returns: + An async function with the function node signature + """ + sig = inspect.signature(original_function) + param_names = list(sig.parameters.keys()) + excluded_params = {'inputs', 'variables'} + is_async = inspect.iscoroutinefunction(original_function) + + async def adapted_function( + inputs: Optional[List[BaseMessage]] = None, + variables: Optional[Dict[str, Any]] = None, + **kwargs, + ) -> str: + """ + Adapted function that works as a function node. + + Args: + inputs: List of BaseMessage objects containing function parameters + variables: Dictionary of variables that may contain function parameters + **kwargs: Additional keyword arguments + + Returns: + String result of the function execution + """ + try: + all_params = extract_function_params(inputs, variables, **kwargs) + call_kwargs = _build_call_kwargs( + param_names, all_params, kwargs, excluded_params + ) + _validate_required_params(sig, call_kwargs, function_name, excluded_params) + + result = ( + await original_function(**call_kwargs) + if is_async + else original_function(**call_kwargs) + ) + return _convert_result_to_string(result) + + except ValueError: + raise + except Exception as e: + error_msg = f"Error executing function '{function_name}': {str(e)}" + logger.error(error_msg) + raise Exception(error_msg) from e + + adapted_function.__name__ = f'{original_function.__name__}_node_adapter' + adapted_function.__doc__ = ( + f"Function node adapter for {function_name}.\n\n" + f"Original function: {original_function.__name__}\n" + f"Original docstring: {original_function.__doc__ or 'No docstring'}" + ) + + return adapted_function + + +def get_function_node_adapter( + function_name: str, + function_registry: Optional[Dict[str, FunctionType]] = None, +) -> Optional[Callable]: + """ + Get a function node adapter for a function from the registry. + + Args: + function_name: Name of the function in the registry + function_registry: Optional custom registry dict. If None, uses FUNCTION_REGISTRY + + Returns: + Adapted function with function node signature, or None if function not found + """ + if function_registry is None: + from tools_module.registry.function_registry import FUNCTION_REGISTRY + + function_registry = FUNCTION_REGISTRY + + original_function = function_registry.get(function_name) + if original_function is None: + logger.warning(f"Function '{function_name}' not found in registry") + return None + + return create_function_node_adapter(original_function, function_name) diff --git a/wavefront/server/modules/tools_module/tools_module/registry/function_node_registry.py b/wavefront/server/modules/tools_module/tools_module/registry/function_node_registry.py new file mode 100644 index 00000000..c6cd56aa --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/registry/function_node_registry.py @@ -0,0 +1,69 @@ +""" +Function Node Registry + +Provides function node-compatible versions of all registry functions. +These functions can be used directly as function nodes in workflows without +modifying the original registry functions. 
+ +Usage: + from tools_module.registry.function_node_registry import FUNCTION_NODE_REGISTRY + + # Get an adapted function + adapted_fn = FUNCTION_NODE_REGISTRY.get('bigquery_test_connection') + + # Use it as a function node + result = await adapted_fn( + inputs=[...], + variables={'datasource_id': 'my-datasource'}, + ) +""" + +from typing import Dict, Callable, Optional +from tools_module.registry.function_registry import FUNCTION_REGISTRY +from tools_module.registry.function_node_adapter import create_function_node_adapter + + +def _create_function_node_registry() -> Dict[str, Callable]: + """ + Create a registry of function node adapters for all registry functions. + + Returns: + Dictionary mapping function names to their adapted versions + """ + node_registry = {} + + for function_name, original_function in FUNCTION_REGISTRY.items(): + adapted_function = create_function_node_adapter( + original_function, + function_name, + ) + node_registry[function_name] = adapted_function + + return node_registry + + +# Registry of function node-compatible functions +FUNCTION_NODE_REGISTRY = _create_function_node_registry() + + +def get_function_node(function_name: str) -> Optional[Callable]: + """ + Get a function node adapter for a specific function. + + Args: + function_name: Name of the function + + Returns: + Adapted function with function node signature, or None if not found + """ + return FUNCTION_NODE_REGISTRY.get(function_name) + + +def get_all_function_node_names() -> list[str]: + """ + Get list of all available function node names. + + Returns: + List of function names available as function nodes + """ + return list(FUNCTION_NODE_REGISTRY.keys()) diff --git a/wavefront/server/modules/tools_module/tools_module/registry/function_registry.py b/wavefront/server/modules/tools_module/tools_module/registry/function_registry.py new file mode 100644 index 00000000..f89b7f42 --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/registry/function_registry.py @@ -0,0 +1,46 @@ +""" +Main Function Registry + +Aggregates all function registries from different categories into a single registry +for use by the ToolLoader. 
+""" + +from tools_module.registry.registries.datasource_registry import DATASOURCE_REGISTRY +from tools_module.registry.registries.knowledge_base_registry import ( + KNOWLEDGE_BASE_REGISTRY, +) +from tools_module.registry.registries.email_registry import EMAIL_REGISTRY +from tools_module.registry.registries.util_function_registry import ( + UTIL_FUNCTION_REGISTRY, +) + + +# TODO: Import other category registries as they are implemented +# Master registry combining all function categories + + +def _merge_registries(*registries): + """Merge registries with collision detection""" + merged = {} + for registry in registries: + for key, value in registry.items(): + if key in merged: + raise ValueError( + f"Duplicate function name '{key}' found across registries" + ) + merged[key] = value + return merged + + +FUNCTION_REGISTRY = _merge_registries( + DATASOURCE_REGISTRY, + KNOWLEDGE_BASE_REGISTRY, + EMAIL_REGISTRY, + UTIL_FUNCTION_REGISTRY, +) + + +# Helper function to get all available function names +def get_available_function_names(): + """Get list of all available function names""" + return list(FUNCTION_REGISTRY.keys()) diff --git a/wavefront/server/modules/tools_module/tools_module/registry/registries/datasource_registry.py b/wavefront/server/modules/tools_module/tools_module/registry/registries/datasource_registry.py new file mode 100644 index 00000000..3afeb4d6 --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/registry/registries/datasource_registry.py @@ -0,0 +1,31 @@ +""" +Datasource Tools Registry + +Contains all datasource-related tools including BigQuery, PostgreSQL, MySQL, etc. +""" + +from tools_module.datasources.bigquery_tools import ( + bigquery_test_connection, + bigquery_get_schema, + bigquery_get_table_names, + bigquery_insert_rows, + bigquery_execute_query, +) + +# BigQuery Tools Registry +BIGQUERY_REGISTRY = { + 'bigquery_test_connection': bigquery_test_connection, + 'bigquery_get_schema': bigquery_get_schema, + 'bigquery_get_table_names': bigquery_get_table_names, + 'bigquery_insert_rows': bigquery_insert_rows, + 'bigquery_execute_query': bigquery_execute_query, +} + +# TODO: Add other datasource registries as they are implemented +# REDSHIFT_REGISTRY = {} + +# Combined datasource registry +DATASOURCE_REGISTRY = { + **BIGQUERY_REGISTRY, + # **REDSHIFT_REGISTRY, +} diff --git a/wavefront/server/modules/tools_module/tools_module/registry/registries/email_registry.py b/wavefront/server/modules/tools_module/tools_module/registry/registries/email_registry.py new file mode 100644 index 00000000..4a56f0c0 --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/registry/registries/email_registry.py @@ -0,0 +1,3 @@ +from tools_module.email.email_tool import send_email + +EMAIL_REGISTRY = {'send_email': send_email} diff --git a/wavefront/server/modules/tools_module/tools_module/registry/registries/knowledge_base_registry.py b/wavefront/server/modules/tools_module/tools_module/registry/registries/knowledge_base_registry.py new file mode 100644 index 00000000..1431c14e --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/registry/registries/knowledge_base_registry.py @@ -0,0 +1,5 @@ +from tools_module.knowlegebase.knowledge_base_tools import querying_knowlegebase + +KNOWLEDGE_BASE_REGISTRY = { + 'querying_knowlegebase': querying_knowlegebase, +} diff --git a/wavefront/server/modules/tools_module/tools_module/registry/registries/util_function_registry.py 
b/wavefront/server/modules/tools_module/tools_module/registry/registries/util_function_registry.py new file mode 100644 index 00000000..ccf91190 --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/registry/registries/util_function_registry.py @@ -0,0 +1,7 @@ +from tools_module.utils.message_processor_fn import execute_message_processor_fn +from tools_module.utils.api_service_fn import execute_api_service_fn + +UTIL_FUNCTION_REGISTRY = { + 'message_processor': execute_message_processor_fn, + 'rf_api_service': execute_api_service_fn, +} diff --git a/wavefront/server/modules/tools_module/tools_module/registry/tool_loader.py b/wavefront/server/modules/tools_module/tools_module/registry/tool_loader.py new file mode 100644 index 00000000..e6dc0dd7 --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/registry/tool_loader.py @@ -0,0 +1,97 @@ +import json +import os +from typing import Dict, List, Optional +from flo_ai.tool.base_tool import Tool +from tools_module.registry.function_registry import FUNCTION_REGISTRY + + +class ToolLoader: + """Handles loading and management of tools from the registry""" + + def __init__(self, tools_json_path: Optional[str] = None): + """ + Initialize tool loader + + Args: + tools_json_path: Path to available_tools.json file + """ + if tools_json_path is None: + # Default to available_tools.json in the module root + current_dir = os.path.dirname(os.path.dirname(__file__)) + tools_json_path = os.path.join(current_dir, 'available_tools.json') + + self.tools_json_path = tools_json_path + self._tools_metadata = None + + def _load_tools_metadata(self) -> Dict: + """Load tools metadata from JSON file""" + if self._tools_metadata is None: + with open(self.tools_json_path, 'r') as f: + self._tools_metadata = json.load(f) + return self._tools_metadata + + def get_available_tools(self) -> Dict: + """Get all available tools metadata""" + return self._load_tools_metadata() + + def get_tool_names(self) -> List[str]: + """Get list of available tool names""" + return list(self._load_tools_metadata().keys()) + + def get_tool_metadata(self, tool_name: str) -> Optional[Dict]: + """Get metadata for a specific tool""" + tools_metadata = self._load_tools_metadata() + return tools_metadata.get(tool_name) + + def load_tool(self, tool_name: str) -> Optional[Tool]: + """ + Load a specific tool by name + + Args: + tool_name: Name of the tool to load + + Returns: + flo_ai.Tool instance or None if tool not found + """ + # Get tool metadata + tool_metadata = self.get_tool_metadata(tool_name) + if not tool_metadata: + return None + + # Get tool function from registry + tool_function = FUNCTION_REGISTRY.get(tool_name) + if not tool_function: + return None + + # Create Tool instance + return Tool( + name=tool_metadata['name'], + description=tool_metadata['description'], + function=tool_function, + parameters=tool_metadata['parameters'], + ) + + def load_tools(self, tool_names: List[str]) -> List[Tool]: + """ + Load multiple tools by names + + Args: + tool_names: List of tool names to load + + Returns: + List of flo_ai.Tool instances + """ + tools = [] + for tool_name in tool_names: + tool = self.load_tool(tool_name) + if tool: + tools.append(tool) + return tools + + def load_all_tools(self) -> List[Tool]: + """Load all available tools""" + return self.load_tools(self.get_tool_names()) + + def load_tool_with_name(self, tool_name: str) -> Optional[Tool]: + """Load a tool by name""" + return self.load_tool(tool_name) diff --git 
a/wavefront/server/modules/tools_module/tools_module/services/default_tool_provider.py b/wavefront/server/modules/tools_module/tools_module/services/default_tool_provider.py new file mode 100644 index 00000000..af6eb1c9 --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/services/default_tool_provider.py @@ -0,0 +1,24 @@ +from typing import List, Dict, Any +from tools_module.interfaces.tool_details_provider import ToolDetailsProvider +from tools_module.models.tool_schemas import ToolExecutionDetails + + +class DefaultToolDetailsProvider(ToolDetailsProvider): + def can_handle(self, category: str) -> bool: + return True # Fallback for everything else + + async def get_tool_details( + self, tool_metadata: Dict[str, Any] + ) -> List[ToolExecutionDetails]: + return [ + ToolExecutionDetails( + name=tool_metadata['name'], + resource_name='', + prefill_parameter_names=tool_metadata.get('prefill_values', []), + prefilled_value={}, + required=tool_metadata.get('required', []), + parameters=tool_metadata['parameters'], + description=tool_metadata['description'], + category=tool_metadata.get('category', ''), + ) + ] diff --git a/wavefront/server/modules/tools_module/tools_module/services/tool_service.py b/wavefront/server/modules/tools_module/tools_module/services/tool_service.py new file mode 100644 index 00000000..43b3824b --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/services/tool_service.py @@ -0,0 +1,124 @@ +from typing import Dict, List, Optional +from tools_module.registry.tool_loader import ToolLoader +from tools_module.interfaces.tool_details_provider import ToolDetailsProvider +from tools_module.models.tool_schemas import ToolExecutionDetails + + +class ToolService: + """Service for managing tools and providing API endpoints""" + + def __init__( + self, tool_loader: ToolLoader, tool_providers: List[ToolDetailsProvider] + ): + self.tool_loader = tool_loader + self.tool_providers = tool_providers + + def get_available_tools(self) -> Dict: + """ + Get all available tools with their metadata + + Returns: + Dictionary containing all tool definitions with parameters and descriptions + """ + return self.tool_loader.get_available_tools() + + def get_tool_names(self) -> List[str]: + """ + Get list of available tool names + + Returns: + List of tool names + """ + return self.tool_loader.get_tool_names() + + def get_tool_metadata(self, tool_name: str) -> Optional[Dict]: + """ + Get metadata for a specific tool + + Args: + tool_name: Name of the tool + + Returns: + Tool metadata dictionary or None if not found + """ + return self.tool_loader.get_tool_metadata(tool_name) + + def get_tools_by_category(self, category: str) -> Dict: + """ + Get tools filtered by category + + Args: + category: Category to filter by (e.g., 'datasource') + + Returns: + Dictionary of tools in the specified category + """ + all_tools = self.get_available_tools() + filtered_tools = {} + + for tool_name, tool_data in all_tools.items(): + if tool_data.get('category') == category: + filtered_tools[tool_name] = tool_data + + return filtered_tools + + def validate_tool_exists(self, tool_name: str) -> bool: + """ + Check if a tool exists + + Args: + tool_name: Name of the tool to check + + Returns: + True if tool exists, False otherwise + """ + return tool_name in self.get_tool_names() + + def validate_tools_exist(self, tool_names: List[str]) -> List[str]: + """ + Validate multiple tool names and return any missing ones + + Args: + tool_names: List of tool names to validate + + Returns: + 
List of tool names that don't exist + """ + available_tools = set(self.get_tool_names()) + missing_tools = [] + + for tool_name in tool_names: + if tool_name not in available_tools: + missing_tools.append(tool_name) + + return missing_tools + + async def get_all_tool_details(self) -> List[ToolExecutionDetails]: + """ + Get details for all available tools using registered providers + + Returns: + List of tool details + """ + tool_metadata = self.get_available_tools() + all_tool_details = [] + + for tool_name, tool_data in tool_metadata.items(): + category = tool_data.get('category', '') + + # Find the first provider that can handle this category + # We prioritize specific providers over the default one + # Assuming providers are ordered with specific ones first + handled = False + for provider in self.tool_providers: + if provider.can_handle(category): + details = await provider.get_tool_details(tool_data) + all_tool_details.extend(details) + handled = True + break + + if not handled: + # Should not happen if DefaultToolDetailsProvider is last + pass + + return all_tool_details diff --git a/wavefront/server/modules/tools_module/tools_module/tools_container.py b/wavefront/server/modules/tools_module/tools_module/tools_container.py new file mode 100644 index 00000000..fc34f76a --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/tools_container.py @@ -0,0 +1,46 @@ +from dependency_injector import containers +from dependency_injector import providers +from tools_module.registry.tool_loader import ToolLoader +from tools_module.services.tool_service import ToolService + + +from tools_module.datasources.provider import DatasourceToolDetailsProvider +from tools_module.knowlegebase.provider import KnowledgeBaseToolDetailsProvider +from tools_module.services.default_tool_provider import DefaultToolDetailsProvider + + +class ToolsContainer(containers.DeclarativeContainer): + """Dependency injection container for tools module""" + + datasource_repository = providers.Dependency() + knowledge_base_repository = providers.Dependency() + knowledge_base_inference_repository = providers.Dependency() + # Tool loader + tool_loader = providers.Singleton( + ToolLoader, + tools_json_path=None, # Uses default path + ) + + # Tool Providers + datasource_tool_provider = providers.Singleton( + DatasourceToolDetailsProvider, datasource_repository=datasource_repository + ) + + knowledge_base_tool_provider = providers.Singleton( + KnowledgeBaseToolDetailsProvider, + knowledge_base_repository=knowledge_base_repository, + knowledge_base_inference_repository=knowledge_base_inference_repository, + ) + + default_tool_provider = providers.Singleton(DefaultToolDetailsProvider) + + # Tool service + tool_service = providers.Singleton( + ToolService, + tool_loader=tool_loader, + tool_providers=providers.List( + datasource_tool_provider, + knowledge_base_tool_provider, + default_tool_provider, + ), + ) diff --git a/wavefront/server/modules/tools_module/tools_module/utils/api_service_fn.py b/wavefront/server/modules/tools_module/tools_module/utils/api_service_fn.py new file mode 100644 index 00000000..6a437300 --- /dev/null +++ b/wavefront/server/modules/tools_module/tools_module/utils/api_service_fn.py @@ -0,0 +1,40 @@ +import json +from typing import Optional, Dict, Any +from api_services_module.execution.execute import execute_api_service + + +async def execute_api_service_fn( + api_service_id: str, + api_id: str, + api_version: str = 'v1', + headers: Optional[Dict[str, str]] = None, + payload: 
Optional[Dict[str, Any]] = None,
+    query_params: Optional[Dict[str, Any]] = None,
+    path_params: Optional[Dict[str, Any]] = None,
+    variables: Optional[Dict[str, Any]] = None,
+) -> str:
+    """Execute a configured API service call and return the response data as a string"""
+
+    headers = headers or {}
+    if not any(k.lower() == 'content-type' for k in headers.keys()):
+        headers['content-type'] = 'application/json'
+
+    response = await execute_api_service(
+        api_service_id=api_service_id,
+        api_id=api_id,
+        api_version=api_version,
+        payload=payload,
+        query_params=query_params,
+        path_params=path_params,
+        headers=headers,
+    )
+
+    data = response.data
+    if isinstance(data, str):
+        return data
+    if data is None:
+        return ''
+    try:
+        return json.dumps(data)
+    except TypeError:
+        return str(data)
diff --git a/wavefront/server/modules/tools_module/tools_module/utils/message_processor_fn.py b/wavefront/server/modules/tools_module/tools_module/utils/message_processor_fn.py
new file mode 100644
index 00000000..11189dcf
--- /dev/null
+++ b/wavefront/server/modules/tools_module/tools_module/utils/message_processor_fn.py
@@ -0,0 +1,43 @@
+from typing import Dict, Any
+import json
+from plugins_module.controllers.message_processor_controller import (
+    execute_message_processor,
+    ExecuteMessageProcessorPayload,
+)
+
+
+async def execute_message_processor_fn(
+    message_processor_id: str,
+    input_data: Dict[str, Any],
+) -> str:
+    """Process a message using the message processor function
+
+    Args:
+        message_processor_id: The ID of the message processor to execute
+        input_data: The input data to pass to the message processor (dict of key-value pairs)
+
+    Returns:
+        The result from the message processor execution as a string
+    """
+
+    payload = ExecuteMessageProcessorPayload(input_data=input_data)
+    response = await execute_message_processor(message_processor_id, payload)
+
+    response_body_bytes = response.body
+    response_body = json.loads(response_body_bytes.decode('utf-8'))
+
+    # Check if there's an error in the response
+    meta = response_body.get('meta', {})
+    if meta.get('status') == 'failure':
+        error_msg = meta.get('error', 'Unknown error')
+        raise Exception(f'Message processor execution failed: {error_msg}')
+
+    data = response_body.get('data')
+    if data is None:
+        raise Exception('Message processor response has no data field')
+
+    result = data.get('result')
+    if result is None:
+        raise Exception('Message processor response data has no result field')
+
+    return result
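These two utility functions are what the `message_processor` and `rf_api_service` registry entries resolve to, and like every registry function they are invoked through the function-node adapter defined earlier in this patch. The adapter's parameter merging is easy to verify in isolation; a small sketch of the documented priority order (kwargs over variables), using no message inputs so no JSON parsing is involved:

```python
from tools_module.registry.function_node_adapter import extract_function_params

# variables supply defaults; explicit kwargs win on conflict, per the
# priority order documented in extract_function_params.
params = extract_function_params(
    inputs=None,
    variables={'datasource_id': 'ds-from-variables', 'limit': 10},
    datasource_id='ds-from-kwargs',
)
assert params == {'datasource_id': 'ds-from-kwargs', 'limit': 10}
```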
"testing-postgresql>=1.3.0,<2.0.0" +] + +[tool.pytest.ini_options] +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" + +[tool.uv] +package = true + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["user_management_module"] diff --git a/wavefront/server/modules/user_management_module/tests/conftest.py b/wavefront/server/modules/user_management_module/tests/conftest.py new file mode 100644 index 00000000..102d1876 --- /dev/null +++ b/wavefront/server/modules/user_management_module/tests/conftest.py @@ -0,0 +1,326 @@ +import json +from unittest.mock import Mock +from uuid import uuid4 + +from auth_module.auth_container import AuthContainer +from common_module.common_container import CommonContainer +from common_module.middleware.request_id_middleware import RequestIdMiddleware +from db_repo_module.database.base import Base +from db_repo_module.db_repo_container import DatabaseModuleContainer +from fastapi import FastAPI +from fastapi.testclient import TestClient +import pytest +from sqlalchemy.ext.asyncio import async_sessionmaker +from sqlalchemy.ext.asyncio import create_async_engine +import testing.postgresql +from user_management_module.authorization.require_auth import RequireAuthMiddleware +from user_management_module.router import user_management_router +from user_management_module.user_container import UserContainer +from io import BytesIO +from knowledge_base_module.knowledge_base_container import KnowledgeBaseContainer +from dependency_injector import providers + + +class MockDbClient: + def __init__(self, engine, session_factory): + self._engine = engine + self.session = session_factory + + +@pytest.fixture +async def test_engine(): + with testing.postgresql.Postgresql() as postgresql: + database_url = postgresql.url() + + async_database_url = database_url.replace( + 'postgresql://', 'postgresql+psycopg://' + ) + + engine = create_async_engine(async_database_url) + + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + yield engine + + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + await engine.dispose() + + +@pytest.fixture +async def test_session(test_engine): + async_session = async_sessionmaker(autocommit=False, bind=test_engine) + yield async_session + + +@pytest.fixture +def test_user_id(): + """Fixture to provide a consistent test user ID.""" + return str(uuid4()) + + +@pytest.fixture +def test_session_id(): + """Fixture to provide a consistent test session ID.""" + return str(uuid4()) + + +@pytest.fixture +def mock_config(): + """Fixture to provide mock config for testing.""" + return { + 'web': {'url': 'http://test.example.com'}, + 'auth': { + 'max_failed_attempts': '3', + 'lockout_duration_hours': '24', + 'inactive_days_threshold': '60', + }, + } + + +@pytest.fixture +def setup_containers( + test_engine, test_session, test_user_id, test_session_id, mock_config +): + db_repo_container = DatabaseModuleContainer() + mock_db_client = MockDbClient(test_engine, test_session) + db_repo_container.db_client.override(mock_db_client) + + common_container = CommonContainer() + + cache_manager_mock = Mock() + # For session data + cache_manager_mock.get_str.return_value = json.dumps( + {'user_id': test_user_id, 'device_info': 'Mozilla/5.0'} + ) + # For reset password + cache_manager_mock.get_str.side_effect = ( + lambda key: test_user_id + if key == 'mock_reset_code' + else json.dumps({'user_id': test_user_id, 
+    cache_manager_mock.add = Mock()
+    common_container.cache_manager.override(cache_manager_mock)
+
+    # Mock token service
+    mock_token_service = Mock()
+    mock_token_service.create_token.return_value = 'mock_token'
+    mock_token_service.decode_token.return_value = {
+        'sub': 'test@example.com',
+        'user_id': test_user_id,
+        'role_id': 'test_role_id',
+        'session_id': test_session_id,
+        'code': 'mock_reset_code',
+    }
+    mock_token_service.token_expiry = 3600
+    mock_token_service.temporary_token_expiry = 600
+
+    auth_container = AuthContainer(
+        db_client=db_repo_container.db_client,
+        cache_manager=cache_manager_mock,
+    )
+    auth_container.token_service.override(mock_token_service)
+
+    # Mock the auth container's superset_service
+    mock_superset_service = Mock()
+    mock_superset_service.generate_guest_token.return_value = 'mock_guest_token'
+    auth_container.superset_service.override(mock_superset_service)
+
+    user_container = UserContainer(
+        db_client=db_repo_container.db_client,
+        cache_manager=cache_manager_mock,
+    )
+    user_container.config.override(mock_config)
+
+    # Mock email service
+    mock_email_service = Mock()
+    mock_email_service.send_forget_password_email.return_value = True
+    user_container.email_service.override(mock_email_service)
+
+    # Setup KnowledgeBaseContainer for auth_module.controllers (outlook_controller)
+    knowledge_base_container = KnowledgeBaseContainer(
+        db_client=db_repo_container.db_client,
+        cache_manager=cache_manager_mock,
+    )
+
+    # Mock CloudStorageManager
+    mock_cloud_storage = Mock()
+    mock_cloud_storage.save_small_file = Mock()
+    mock_cloud_storage.save_large_file = Mock()
+    mock_cloud_storage.get_file = Mock(return_value=BytesIO(b'file content'))
+    knowledge_base_container.cloud_storage.override(
+        providers.Singleton(lambda: mock_cloud_storage)
+    )
+
+    # Mock MessageQueueManager
+    mock_message_queue = Mock()
+    mock_message_queue.add_message = Mock(return_value='message_id_123')
+    knowledge_base_container.message_queue.override(
+        providers.Singleton(lambda: mock_message_queue)
+    )
+
+    # Mock config
+    test_kb_config_dict = {
+        'cloud_config': {'cloud_provider': 'gcp'},
+        'gcp': {
+            'gcp_asset_storage_bucket': 'test_bucket',
+            'email_topic_id': 'test_topic',
+        },
+        'aws': {
+            'aws_asset_storage_bucket': 'test_bucket',
+            'queue_url': 'test_queue_url',
+        },
+    }
+    knowledge_base_container.config.from_dict(test_kb_config_dict)
+
+    # Wire KnowledgeBaseContainer
+    knowledge_base_container.wire(
+        packages=[
+            'auth_module.controllers',
+        ]
+    )
+
+    common_container.wire(
+        packages=[
+            'user_management_module.controllers',
+            'auth_module.controllers',
+            'user_management_module.authorization',
+        ]
+    )
+    auth_container.wire(
+        packages=[
+            'user_management_module.controllers',
+            'user_management_module.authorization',
+        ]
+    )
+    user_container.wire(
+        packages=[
+            'user_management_module.authorization',
+            'user_management_module.controllers',
+            'auth_module.controllers',
+        ]
+    )
+
+    yield auth_container, common_container, user_container
+    auth_container.unwire()
+    common_container.unwire()
+
+
+@pytest.fixture
+def test_client(setup_containers):
+    app = FastAPI()
+    app.add_middleware(RequestIdMiddleware)
+    app.add_middleware(RequireAuthMiddleware)
+    app.include_router(user_management_router, prefix='/floware')
+    return TestClient(app)
+
+
+@pytest.fixture
+def
mock_auth_functions(monkeypatch): + async def mock_get_current_user(request): + return 'test_user_id', 'test_role_id', 'test_session_id' + + async def mock_check_is_admin(role_id): + return True + + monkeypatch.setattr( + 'auth_module.controllers.superset_controller.check_is_admin', + mock_check_is_admin, + ) + monkeypatch.setattr( + 'user_management_module.utils.user_utils.get_current_user', + mock_get_current_user, + ) + + +@pytest.fixture +def mock_auth_admin_functions(monkeypatch): + def mock_get_current_user(request): + return 'test_user_id', 'test_role_id', 'test_session_id' + + monkeypatch.setattr( + 'user_management_module.controllers.access_controller.get_current_user', + mock_get_current_user, + ) + + async def mock_check_is_admin(role_id, role_repository=None): + return True + + monkeypatch.setattr( + 'user_management_module.controllers.access_controller.check_is_admin', + mock_check_is_admin, + ) + + +@pytest.fixture +def mock_admin_false_functions(monkeypatch): + async def mock_check_is_not_admin(role_id): + return False + + monkeypatch.setattr( + 'auth_module.controllers.superset_controller.check_is_admin', + mock_check_is_not_admin, + ) + + +@pytest.fixture +def auth_token(setup_containers, test_user_id, test_session_id): + auth_container, _, _ = setup_containers + token_service = auth_container.token_service() + token = token_service.create_token( + sub='test@example.com', + user_id=test_user_id, + role_id='test_role_id', + session_id=test_session_id, + ) + return token + + +@pytest.fixture +def mock_auth_admin_user_functions(monkeypatch, test_user_id): + def mock_get_current_user(request): + return ( + 'test_role_id', + test_user_id, + 'test_session_id', + ) # Use the actual UUID from the fixture + + monkeypatch.setattr( + 'user_management_module.controllers.user_controller.get_current_user', + mock_get_current_user, + ) + + async def mock_check_is_admin(role_id, role_repository=None): + return True + + monkeypatch.setattr( + 'user_management_module.controllers.user_controller.check_is_admin', + mock_check_is_admin, + ) + + +@pytest.fixture +def mocking_user_controller_is_admin(monkeypatch): + async def mock_check_is_admin(role_id): + return True + + monkeypatch.setattr( + 'user_management_module.controllers.user_controller.check_is_admin', + mock_check_is_admin, + ) + + +@pytest.fixture +def mocking_user_controller_get_current_user(monkeypatch, test_user_id): + def mock_get_current_user(request): + return 'wrong_role_id', test_user_id, 'test_session_id' + + monkeypatch.setattr( + 'user_management_module.controllers.user_controller.get_current_user', + mock_get_current_user, + ) diff --git a/wavefront/server/modules/user_management_module/tests/test_access_controller.py b/wavefront/server/modules/user_management_module/tests/test_access_controller.py new file mode 100644 index 00000000..e87bcddd --- /dev/null +++ b/wavefront/server/modules/user_management_module/tests/test_access_controller.py @@ -0,0 +1,179 @@ +import uuid + +from db_repo_module.models.resource import Resource +from db_repo_module.models.resource import ResourceScope +from db_repo_module.models.role import Role +from db_repo_module.models.role_resource import RoleResource +from db_repo_module.models.session import Session +from db_repo_module.models.user import User +import pytest +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from user_management_module.models.resource import AddableResourceScope + + +async def create_session(test_session: AsyncSession, test_user_id, 
test_session_id): + user = User( + id=test_user_id, + email='test@example.com', + password='hashed_password', + first_name='Test', + last_name='User', + ) + + # Create a session in the database + db_session = Session( + id=test_session_id, user_id=test_user_id, device_info='test_device' + ) + + async with test_session() as session: + session.add(user) + session.add(db_session) + await session.commit() + + +@pytest.mark.asyncio +async def test_create_resource( + test_client, + test_session: AsyncSession, + test_user_id, + test_session_id, + auth_token, + mock_auth_admin_functions, +): + await create_session(test_session, test_user_id, test_session_id) + resource_payload = { + 'resources': [ + { + 'key': 'test_resource', + 'value': 'Test Resource', + 'description': 'Test Description', + 'scope': AddableResourceScope.DATA, + } + ] + } + response = test_client.post( + '/floware/v1/access/resources', + json=resource_payload, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 201 + data = response.json() + assert 'Created 1 resources successfully' in data['data']['message'] + + async with test_session() as session: + result = await session.execute(select(Resource)) + resources = result.scalars().all() + assert len(resources) == 1 + assert resources[0].key == 'test_resource' + + +@pytest.mark.asyncio +async def test_create_role( + test_client, + test_session: AsyncSession, + mock_auth_admin_functions, + test_user_id, + test_session_id, + auth_token, +): + await create_session(test_session, test_user_id, test_session_id) + + resource = Resource( + id=str(uuid.uuid4()), + key='test_resource', + value='Test Resource', + description='Test Description', + scope=ResourceScope.DASHBOARD, + ) + resource_id = resource.id + async with test_session() as session: + session.add(resource) + await session.commit() + + role_payload = { + 'name': 'test_role', + 'description': 'Test Role Description', + 'resources': [resource_id], + } + + response = test_client.post( + '/floware/v1/access/roles', + json=role_payload, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 201 + data = response.json() + assert 'Created role successfully' in data['data']['message'] + + async with test_session() as session: + result = await session.execute(select(Role)) + roles = result.scalars().all() + assert len(roles) == 1 + assert roles[0].name == 'test_role' + + +@pytest.mark.asyncio +async def test_get_roles( + test_client, + test_session: AsyncSession, + mock_auth_admin_functions, + test_user_id, + test_session_id, + auth_token, +): + await create_session(test_session, test_user_id, test_session_id) + resource = Resource( + id=str(uuid.uuid4()), + key='test_resource', + value='Test Resource', + description='Test Description', + scope=ResourceScope.CONSOLE, + ) + role = Role( + id=str(uuid.uuid4()), name='test_role', description='Test Role Description' + ) + resource_id = resource.id + role_id = role.id + async with test_session() as session: + session.add_all([resource, role]) + await session.commit() + + role_resource = RoleResource(role_id=role_id, resource_id=resource_id) + async with test_session() as session: + session.add(role_resource) + await session.commit() + + response = test_client.get( + '/floware/v1/access/roles', headers={'Authorization': f'Bearer {auth_token}'} + ) + assert response.status_code == 200 + data = response.json() + assert len(data['data']['roles']) == 1 + assert data['data']['roles'][0]['name'] == 'test_role' + + 
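The role/resource/user-role seeding seen here recurs, with small variations, throughout the authentication tests that follow. A sketch of how it could be factored into a shared helper (the helper name is hypothetical; the models are the ones these test modules already import):

```python
from uuid import uuid4

from db_repo_module.models.resource import Resource, ResourceScope
from db_repo_module.models.role import Role
from db_repo_module.models.role_resource import RoleResource
from db_repo_module.models.user_role import UserRole


async def grant_console_access(test_session, user_id: str) -> str:
    """Create a CONSOLE-scoped resource plus a role and assign both to user_id."""
    role_id, resource_id = str(uuid4()), str(uuid4())
    async with test_session() as session:
        # Seed the role and resource first so the mapping rows below can
        # reference them.
        session.add_all(
            [
                Role(id=role_id, name='Test Role'),
                Resource(
                    id=resource_id,
                    key='console_resource',
                    value='test_resource',
                    scope=ResourceScope.CONSOLE,
                ),
            ]
        )
        await session.commit()
        session.add_all(
            [
                RoleResource(role_id=role_id, resource_id=resource_id),
                UserRole(user_id=user_id, role_id=role_id),
            ]
        )
        await session.commit()
    return role_id
```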
+@pytest.mark.asyncio +async def test_create_role_invalid_resources( + test_client, + test_session: AsyncSession, + mock_auth_admin_functions, + test_user_id, + test_session_id, + auth_token, +): + await create_session(test_session, test_user_id, test_session_id) + role_payload = { + 'name': 'test_role', + 'description': 'Test Role Description', + 'resources': [str(uuid.uuid4())], # Non-existent resource ID + } + + response = test_client.post( + '/floware/v1/access/roles', + json=role_payload, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 400 + data = response.json() + assert 'found 1 unknown resource(s) in the payload' in data['meta']['error'].lower() diff --git a/wavefront/server/modules/user_management_module/tests/test_auth_controller.py b/wavefront/server/modules/user_management_module/tests/test_auth_controller.py new file mode 100644 index 00000000..4c48d2eb --- /dev/null +++ b/wavefront/server/modules/user_management_module/tests/test_auth_controller.py @@ -0,0 +1,831 @@ +from datetime import datetime, timedelta, timezone +import os +from unittest.mock import Mock +from uuid import uuid4 + +from db_repo_module.models.resource import Resource +from db_repo_module.models.resource import ResourceScope +from db_repo_module.models.role import Role +from db_repo_module.models.role_resource import RoleResource +from db_repo_module.models.session import Session +from db_repo_module.models.user import User +from db_repo_module.models.user_role import UserRole +from dependency_injector import providers +import pytest +from sqlalchemy.ext.asyncio import AsyncSession +from user_management_module.utils.password_utils import hash_password + + +@pytest.mark.asyncio +async def test_authenticate(test_client, test_session: AsyncSession, test_user_id): + # Create test IDs + role_id = str(uuid4()) + resource_id = str(uuid4()) + + # Hash the password before storing it + hashed_password = hash_password('test_password') + + async with test_session() as session: + # First create the user + user = User( + id=test_user_id, + email='test@example.com', + password=hashed_password, + first_name='Test', + last_name='User', + ) + session.add(user) + await session.commit() + + # Then create the role + role = Role(id=role_id, name='Test Role', description='Test Role Description') + session.add(role) + await session.commit() + + # Then create the resource + resource = Resource( + id=resource_id, + key='console_resource', + value='test_resource', + description='Test Resource Description', + scope=ResourceScope.CONSOLE, + ) + session.add(resource) + await session.commit() + + # Then create role-resource mapping + role_resource = RoleResource(role_id=role_id, resource_id=resource_id) + session.add(role_resource) + await session.commit() + + # Finally create user-role mapping + user_role = UserRole(user_id=test_user_id, role_id=role_id) + session.add(user_role) + await session.commit() + + response = test_client.post( + '/floware/v1/authenticate', + json={'email': 'test@example.com', 'password': 'test_password'}, + ) + + assert response.status_code == 200 + assert response.json()['data']['user']['access_token'] == 'mock_token' + + +@pytest.mark.asyncio +async def test_authenticate_invalid_role( + test_client, test_session: AsyncSession, test_user_id +): + # Hash the password before storing it + hashed_password = hash_password('test_password') + + async with test_session() as session: + # First create the user + user = User( + id=test_user_id, + email='test@example.com', + 
password=hashed_password, + first_name='Test', + last_name='User', + ) + session.add(user) + await session.commit() + + response = test_client.post( + '/floware/v1/authenticate', + json={'email': 'test@example.com', 'password': 'test_password'}, + ) + assert response.status_code == 403 + + +@pytest.mark.asyncio +async def test_authenticate_invalid_password( + test_client, test_session: AsyncSession, test_user_id +): + # Hash the password before storing it + hashed_password = hash_password('test_password') + + async with test_session() as session: + # First create the user + user = User( + id=test_user_id, + email='test@example.com', + password=hashed_password, + first_name='Test', + last_name='User', + ) + session.add(user) + await session.commit() + + response = test_client.post( + '/floware/v1/authenticate', + json={'email': 'test@example.com', 'password': 'invalid_password'}, + ) + + assert response.status_code == 403 + + +# testing auth logout +@pytest.mark.asyncio +async def test_logout( + test_client, auth_token, test_session: AsyncSession, test_user_id, test_session_id +): + user = User( + id=test_user_id, + email='test@example.com', + password='hashed_password', + first_name='Test', + last_name='User', + ) + + # Create a session in the database + db_session = Session( + id=test_session_id, user_id=test_user_id, device_info='test_device' + ) + + async with test_session() as session: + session.add(user) + session.add(db_session) + await session.commit() + response = test_client.post( + '/floware/v1/logout', headers={'Authorization': f'Bearer {auth_token}'} + ) + assert response.status_code == 200 + + +# logout test with invalid cache +@pytest.mark.asyncio +async def test_logout_invalid_cache( + test_client, + auth_token, + test_session: AsyncSession, + test_user_id, + test_session_id, + setup_containers, +): + # Get the cache manager mock and set it to return None + _, _, user_container = setup_containers + cache_manager_mock = Mock() + cache_manager_mock.get_str = Mock(return_value=None) + user_container.cache_manager.override( + providers.Singleton(lambda: cache_manager_mock) + ) + + # Create a session in the database but not in cache + response = test_client.post( + '/floware/v1/logout', headers={'Authorization': f'Bearer {auth_token}'} + ) + assert response.status_code == 401 + + +@pytest.mark.asyncio +async def test_authenticate_multiple_failed_attempts_lockout( + test_client, test_session: AsyncSession, test_user_id +): + """Test that multiple failed login attempts result in account lockout""" + # Get max failed attempts from environment variable, default to 3 + max_failed_attempts = int(os.getenv('MAX_FAILED_ATTEMPTS', 3)) + + # Create test IDs + role_id = str(uuid4()) + resource_id = str(uuid4()) + hashed_password = hash_password('correct_password') + + async with test_session() as session: + # Create user + user = User( + id=test_user_id, + email='lockout_test@example.com', + password=hashed_password, + first_name='Test', + last_name='User', + ) + session.add(user) + await session.commit() + + # Create role and resource setup for console access + role = Role(id=role_id, name='Test Role') + resource = Resource( + id=resource_id, + key='console_resource', + value='test_resource', + scope=ResourceScope.CONSOLE, + ) + session.add_all([role, resource]) + await session.commit() + + # Create role-resource and user-role mappings + role_resource = RoleResource(role_id=role_id, resource_id=resource_id) + user_role = UserRole(user_id=test_user_id, role_id=role_id) + 
session.add_all([role_resource, user_role]) + await session.commit() + + # Perform failed login attempts up to the limit + for attempt in range(1, max_failed_attempts): + response = test_client.post( + '/floware/v1/authenticate', + json={'email': 'lockout_test@example.com', 'password': 'wrong_password'}, + ) + assert response.status_code == 403 + + # Final failed login attempt should trigger lockout + response = test_client.post( + '/floware/v1/authenticate', + json={'email': 'lockout_test@example.com', 'password': 'wrong_password'}, + ) + assert response.status_code == 423 + assert 'Account locked' in response.json()['meta']['error'] + assert 'Try again in' in response.json()['meta']['error'] + + # Even correct password should be rejected when locked + response = test_client.post( + '/floware/v1/authenticate', + json={'email': 'lockout_test@example.com', 'password': 'correct_password'}, + ) + assert response.status_code == 423 + assert 'Account locked' in response.json()['meta']['error'] + + +@pytest.mark.asyncio +async def test_authenticate_with_already_locked_account( + test_client, test_session: AsyncSession, test_user_id +): + """Test authentication attempt with an already locked account""" + # Get max failed attempts from environment variable, default to 3 + max_failed_attempts = int(os.getenv('MAX_FAILED_ATTEMPTS', 3)) + + hashed_password = hash_password('test_password') + current_time = datetime.now(timezone.utc) + locked_until = current_time + timedelta(hours=1) # Locked for 1 hour + + async with test_session() as session: + # Create locked user + user = User( + id=test_user_id, + email='locked_user@example.com', + password=hashed_password, + first_name='Locked', + last_name='User', + failed_attempts=max_failed_attempts, + locked_until=locked_until, + last_failed_attempt=current_time, + ) + session.add(user) + await session.commit() + + # Attempt login with correct credentials should still be rejected + response = test_client.post( + '/floware/v1/authenticate', + json={'email': 'locked_user@example.com', 'password': 'test_password'}, + ) + + assert response.status_code == 423 + assert 'Account locked' in response.json()['meta']['error'] + assert 'Try again in' in response.json()['meta']['error'] + + +@pytest.mark.asyncio +async def test_authenticate_resets_failed_attempts_on_success( + test_client, test_session: AsyncSession, test_user_id +): + """Test that successful login resets failed attempts counter""" + # Get max failed attempts from environment variable, default to 3 + max_failed_attempts = int(os.getenv('MAX_FAILED_ATTEMPTS', 3)) + + # Create test IDs + role_id = str(uuid4()) + resource_id = str(uuid4()) + hashed_password = hash_password('test_password') + current_time = datetime.now(timezone.utc) + + async with test_session() as session: + # Create user with some failed attempts but not locked + user = User( + id=test_user_id, + email='reset_attempts@example.com', + password=hashed_password, + first_name='Test', + last_name='User', + failed_attempts=max_failed_attempts + - 1, # Has failed attempts but not locked yet + last_failed_attempt=current_time - timedelta(minutes=30), + ) + session.add(user) + await session.commit() + + # Create role and resource setup for console access + role = Role(id=role_id, name='Test Role') + resource = Resource( + id=resource_id, + key='console_resource', + value='test_resource', + scope=ResourceScope.CONSOLE, + ) + session.add_all([role, resource]) + await session.commit() + + # Create role-resource and user-role mappings + role_resource = 
RoleResource(role_id=role_id, resource_id=resource_id) + user_role = UserRole(user_id=test_user_id, role_id=role_id) + session.add_all([role_resource, user_role]) + await session.commit() + + # Successful login should reset failed attempts + response = test_client.post( + '/floware/v1/authenticate', + json={'email': 'reset_attempts@example.com', 'password': 'test_password'}, + ) + + assert response.status_code == 200 + assert response.json()['data']['user']['access_token'] == 'mock_token' + + # Verify failed attempts were reset in database + async with test_session() as session: + updated_user = await session.get(User, test_user_id) + assert updated_user.failed_attempts == 0 + assert updated_user.locked_until is None + assert updated_user.last_failed_attempt is None + + +@pytest.mark.asyncio +async def test_authenticate_inactive_account_feature_disabled( + test_client, test_session: AsyncSession, test_user_id, monkeypatch, mock_config +): + """Test that inactive users can login when feature flag is disabled""" + # Get threshold from config (same config that service uses) + threshold_days = int(mock_config['auth']['inactive_days_threshold']) + + # Mock the feature flag to be disabled + def mock_is_feature_enabled(feature: str) -> bool: + if feature == 'INACTIVE_ACCOUNT_DISABLE_FLAG': + return False + return False + + monkeypatch.setattr( + 'common_module.feature.feature_flag.is_feature_enabled', mock_is_feature_enabled + ) + monkeypatch.setattr( + 'user_management_module.controllers.auth_controller.is_feature_enabled', + mock_is_feature_enabled, + ) + + # Create test IDs + role_id = str(uuid4()) + resource_id = str(uuid4()) + hashed_password = hash_password('test_password') + + # Create user inactive for (threshold + 30) days + inactive_date = datetime.now(timezone.utc) - timedelta(days=threshold_days + 30) + + async with test_session() as session: + # Create user with old last_login_at + user = User( + id=test_user_id, + email='inactive_test@example.com', + password=hashed_password, + first_name='Inactive', + last_name='User', + last_login_at=inactive_date, + ) + session.add(user) + await session.commit() + + # Create role and resource setup for console access + role = Role(id=role_id, name='Test Role') + resource = Resource( + id=resource_id, + key='console_resource', + value='test_resource', + scope=ResourceScope.CONSOLE, + ) + session.add_all([role, resource]) + await session.commit() + + # Create role-resource and user-role mappings + role_resource = RoleResource(role_id=role_id, resource_id=resource_id) + user_role = UserRole(user_id=test_user_id, role_id=role_id) + session.add_all([role_resource, user_role]) + await session.commit() + + # Should succeed even though user is inactive (feature disabled) + response = test_client.post( + '/floware/v1/authenticate', + json={'email': 'inactive_test@example.com', 'password': 'test_password'}, + ) + + assert response.status_code == 200 + assert response.json()['data']['user']['access_token'] == 'mock_token' + + +@pytest.mark.asyncio +async def test_authenticate_inactive_account_feature_enabled_first_time_user( + test_client, test_session: AsyncSession, test_user_id, monkeypatch, mock_config +): + """Test that first-time users (no last_login_at) can login when feature is enabled""" + + # Mock the feature flag to be enabled + def mock_is_feature_enabled(feature: str) -> bool: + if feature == 'INACTIVE_ACCOUNT_DISABLE_FLAG': + return True + return False + + monkeypatch.setattr( + 'common_module.feature.feature_flag.is_feature_enabled', 
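+        # NOTE: the flag is deliberately patched twice: once at its defining
+        # module (here) and once at the controller's import site (below).
+        # monkeypatch must target the name where it is looked up, or the
+        # controller would keep calling the real helper.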
mock_is_feature_enabled + ) + monkeypatch.setattr( + 'user_management_module.controllers.auth_controller.is_feature_enabled', + mock_is_feature_enabled, + ) + + # Create test IDs + role_id = str(uuid4()) + resource_id = str(uuid4()) + hashed_password = hash_password('test_password') + + async with test_session() as session: + # Create user with no last_login_at (first-time user) + user = User( + id=test_user_id, + email='firsttime_test@example.com', + password=hashed_password, + first_name='FirstTime', + last_name='User', + # last_login_at is None by default + ) + session.add(user) + await session.commit() + + # Create role and resource setup for console access + role = Role(id=role_id, name='Test Role') + resource = Resource( + id=resource_id, + key='console_resource', + value='test_resource', + scope=ResourceScope.CONSOLE, + ) + session.add_all([role, resource]) + await session.commit() + + # Create role-resource and user-role mappings + role_resource = RoleResource(role_id=role_id, resource_id=resource_id) + user_role = UserRole(user_id=test_user_id, role_id=role_id) + session.add_all([role_resource, user_role]) + await session.commit() + + # Should succeed for first-time user even with feature enabled + response = test_client.post( + '/floware/v1/authenticate', + json={'email': 'firsttime_test@example.com', 'password': 'test_password'}, + ) + + assert response.status_code == 200 + assert response.json()['data']['user']['access_token'] == 'mock_token' + + +@pytest.mark.asyncio +async def test_authenticate_inactive_account_feature_enabled_within_threshold( + test_client, test_session: AsyncSession, test_user_id, monkeypatch, mock_config +): + """Test that active users within threshold can login when feature is enabled""" + # Get threshold from config (same config that service uses) + threshold_days = int(mock_config['auth']['inactive_days_threshold']) + + # Mock the feature flag to be enabled + def mock_is_feature_enabled(feature: str) -> bool: + if feature == 'INACTIVE_ACCOUNT_DISABLE_FLAG': + return True + return False + + monkeypatch.setattr( + 'common_module.feature.feature_flag.is_feature_enabled', mock_is_feature_enabled + ) + monkeypatch.setattr( + 'user_management_module.controllers.auth_controller.is_feature_enabled', + mock_is_feature_enabled, + ) + + # Create test IDs + role_id = str(uuid4()) + resource_id = str(uuid4()) + hashed_password = hash_password('test_password') + + # Create user active within threshold (threshold - 30 days ago) + recent_date = datetime.now(timezone.utc) - timedelta(days=threshold_days - 30) + + async with test_session() as session: + # Create user with recent last_login_at + user = User( + id=test_user_id, + email='active_test@example.com', + password=hashed_password, + first_name='Active', + last_name='User', + last_login_at=recent_date, + ) + session.add(user) + await session.commit() + + # Create role and resource setup for console access + role = Role(id=role_id, name='Test Role') + resource = Resource( + id=resource_id, + key='console_resource', + value='test_resource', + scope=ResourceScope.CONSOLE, + ) + session.add_all([role, resource]) + await session.commit() + + # Create role-resource and user-role mappings + role_resource = RoleResource(role_id=role_id, resource_id=resource_id) + user_role = UserRole(user_id=test_user_id, role_id=role_id) + session.add_all([role_resource, user_role]) + await session.commit() + + # Should succeed for active user within threshold + response = test_client.post( + '/floware/v1/authenticate', + json={'email': 
'active_test@example.com', 'password': 'test_password'}, + ) + + assert response.status_code == 200 + assert response.json()['data']['user']['access_token'] == 'mock_token' + + +@pytest.mark.asyncio +async def test_authenticate_inactive_account_feature_enabled_over_threshold( + test_client, test_session: AsyncSession, test_user_id, monkeypatch, mock_config +): + """Test that inactive users over threshold are rejected when feature is enabled""" + # Get threshold from config (same config that service uses) + threshold_days = int(mock_config['auth']['inactive_days_threshold']) + + # Mock the feature flag to be enabled + def mock_is_feature_enabled(feature: str) -> bool: + if feature == 'INACTIVE_ACCOUNT_DISABLE_FLAG': + return True + return False + + monkeypatch.setattr( + 'common_module.feature.feature_flag.is_feature_enabled', mock_is_feature_enabled + ) + monkeypatch.setattr( + 'user_management_module.controllers.auth_controller.is_feature_enabled', + mock_is_feature_enabled, + ) + + # Create test IDs + role_id = str(uuid4()) + resource_id = str(uuid4()) + hashed_password = hash_password('test_password') + + # Create user inactive for (threshold + 30) days - clearly over threshold + inactive_date = datetime.now(timezone.utc) - timedelta(days=threshold_days + 30) + + async with test_session() as session: + # Create user with old last_login_at + user = User( + id=test_user_id, + email='very_inactive_test@example.com', + password=hashed_password, + first_name='VeryInactive', + last_name='User', + last_login_at=inactive_date, + ) + session.add(user) + await session.commit() + + # Create role and resource setup for console access + role = Role(id=role_id, name='Test Role') + resource = Resource( + id=resource_id, + key='console_resource', + value='test_resource', + scope=ResourceScope.CONSOLE, + ) + session.add_all([role, resource]) + await session.commit() + + # Create role-resource and user-role mappings + role_resource = RoleResource(role_id=role_id, resource_id=resource_id) + user_role = UserRole(user_id=test_user_id, role_id=role_id) + session.add_all([role_resource, user_role]) + await session.commit() + + # Should be rejected due to inactivity + response = test_client.post( + '/floware/v1/authenticate', + json={'email': 'very_inactive_test@example.com', 'password': 'test_password'}, + ) + + assert response.status_code == 403 + assert 'disabled due to inactivity' in response.json()['meta']['error'] + assert 'days ago' in response.json()['meta']['error'] + + +@pytest.mark.asyncio +async def test_authenticate_updates_last_login_timestamp( + test_client, test_session: AsyncSession, test_user_id +): + """Test that successful login updates user's last_login_at timestamp""" + # Create test IDs + role_id = str(uuid4()) + resource_id = str(uuid4()) + hashed_password = hash_password('test_password') + + # Create user with old last_login_at timestamp + old_login_date = datetime.now(timezone.utc) - timedelta(days=10) + + async with test_session() as session: + # Create user with old last_login_at + user = User( + id=test_user_id, + email='update_timestamp_test@example.com', + password=hashed_password, + first_name='Update', + last_name='User', + last_login_at=old_login_date, + ) + session.add(user) + await session.commit() + + # Create role and resource setup for console access + role = Role(id=role_id, name='Test Role') + resource = Resource( + id=resource_id, + key='console_resource', + value='test_resource', + scope=ResourceScope.CONSOLE, + ) + session.add_all([role, resource]) + await 
session.commit() + + # Create role-resource and user-role mappings + role_resource = RoleResource(role_id=role_id, resource_id=resource_id) + user_role = UserRole(user_id=test_user_id, role_id=role_id) + session.add_all([role_resource, user_role]) + await session.commit() + + # Perform successful authentication + response = test_client.post( + '/floware/v1/authenticate', + json={ + 'email': 'update_timestamp_test@example.com', + 'password': 'test_password', + }, + ) + + assert response.status_code == 200 + assert response.json()['data']['user']['access_token'] == 'mock_token' + + # Verify last_login_at was updated in database + async with test_session() as session: + updated_user = await session.get(User, test_user_id) + assert updated_user.last_login_at is not None + + # Handle timezone-aware/naive datetime comparison for both timestamps + updated_login_time = updated_user.last_login_at + if updated_login_time.tzinfo is None: + updated_login_time = updated_login_time.replace(tzinfo=timezone.utc) + + old_login_time = old_login_date + if old_login_time.tzinfo is None: + old_login_time = old_login_time.replace(tzinfo=timezone.utc) + + # Verify the timestamp was updated (should be more recent than the old timestamp) + assert updated_login_time > old_login_time + + +@pytest.mark.asyncio +async def test_authenticate_inactive_account_with_wrong_password( + test_client, test_session: AsyncSession, test_user_id, monkeypatch, mock_config +): + """Test that inactivity error takes precedence over wrong password error""" + # Get threshold from config (same config that service uses) + threshold_days = int(mock_config['auth']['inactive_days_threshold']) + + # Mock the feature flag to be enabled + def mock_is_feature_enabled(feature: str) -> bool: + if feature == 'INACTIVE_ACCOUNT_DISABLE_FLAG': + return True + return False + + monkeypatch.setattr( + 'common_module.feature.feature_flag.is_feature_enabled', mock_is_feature_enabled + ) + monkeypatch.setattr( + 'user_management_module.controllers.auth_controller.is_feature_enabled', + mock_is_feature_enabled, + ) + + # Create test IDs + role_id = str(uuid4()) + resource_id = str(uuid4()) + hashed_password = hash_password('correct_password') + + # Create user inactive for (threshold + 30) days + inactive_date = datetime.now(timezone.utc) - timedelta(days=threshold_days + 30) + + async with test_session() as session: + # Create inactive user + user = User( + id=test_user_id, + email='inactive_wrong_pwd_test@example.com', + password=hashed_password, + first_name='InactiveWrong', + last_name='User', + last_login_at=inactive_date, + ) + session.add(user) + await session.commit() + + # Create role and resource setup for console access + role = Role(id=role_id, name='Test Role') + resource = Resource( + id=resource_id, + key='console_resource', + value='test_resource', + scope=ResourceScope.CONSOLE, + ) + session.add_all([role, resource]) + await session.commit() + + # Create role-resource and user-role mappings + role_resource = RoleResource(role_id=role_id, resource_id=resource_id) + user_role = UserRole(user_id=test_user_id, role_id=role_id) + session.add_all([role_resource, user_role]) + await session.commit() + + # Use wrong password - should show inactivity error, not wrong password error + response = test_client.post( + '/floware/v1/authenticate', + json={ + 'email': 'inactive_wrong_pwd_test@example.com', + 'password': 'wrong_password', + }, + ) + + assert response.status_code == 403 + assert 'disabled due to inactivity' in response.json()['meta']['error'] 
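+    # These precedence tests pin the assumed order of checks in the controller:
+    #   1. locked account            -> 423 'Account locked ... Try again in ...'
+    #   2. inactive beyond threshold -> 403 '... disabled due to inactivity'
+    #   3. bad credentials           -> 403 'Incorrect username or password'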
+ # Should NOT show "Incorrect username or password" + assert 'Incorrect username or password' not in response.json()['meta']['error'] + + +@pytest.mark.asyncio +async def test_authenticate_inactive_account_with_lockout( + test_client, test_session: AsyncSession, test_user_id, monkeypatch, mock_config +): + """Test that lockout error takes precedence over inactivity error""" + # Get threshold from config (same config that service uses) + threshold_days = int(mock_config['auth']['inactive_days_threshold']) + + # Mock the feature flag to be enabled + def mock_is_feature_enabled(feature: str) -> bool: + if feature == 'INACTIVE_ACCOUNT_DISABLE_FLAG': + return True + return False + + monkeypatch.setattr( + 'common_module.feature.feature_flag.is_feature_enabled', mock_is_feature_enabled + ) + monkeypatch.setattr( + 'user_management_module.controllers.auth_controller.is_feature_enabled', + mock_is_feature_enabled, + ) + + # Get max failed attempts from environment variable, default to 3 + max_failed_attempts = int(os.getenv('MAX_FAILED_ATTEMPTS', 3)) + + hashed_password = hash_password('test_password') + current_time = datetime.now(timezone.utc) + + # Create user both inactive (threshold + 30 days) AND locked + inactive_date = current_time - timedelta(days=threshold_days + 30) + locked_until = current_time + timedelta(hours=1) + + async with test_session() as session: + # Create user that is both inactive and locked + user = User( + id=test_user_id, + email='inactive_locked_test@example.com', + password=hashed_password, + first_name='InactiveLocked', + last_name='User', + last_login_at=inactive_date, + failed_attempts=max_failed_attempts, + locked_until=locked_until, + last_failed_attempt=current_time, + ) + session.add(user) + await session.commit() + + # Should show lockout error first, not inactivity error + response = test_client.post( + '/floware/v1/authenticate', + json={'email': 'inactive_locked_test@example.com', 'password': 'test_password'}, + ) + + assert response.status_code == 423 # 423 for locked accounts + assert 'Account locked' in response.json()['meta']['error'] + # Should NOT show inactivity error when user is also locked + assert 'disabled due to inactivity' not in response.json()['meta']['error'] diff --git a/wavefront/server/modules/user_management_module/tests/test_user_controller.py b/wavefront/server/modules/user_management_module/tests/test_user_controller.py new file mode 100644 index 00000000..a5b5b075 --- /dev/null +++ b/wavefront/server/modules/user_management_module/tests/test_user_controller.py @@ -0,0 +1,1321 @@ +import uuid +from datetime import datetime, timedelta, timezone + +from db_repo_module.models.resource import Resource +from db_repo_module.models.resource import ResourceScope +from db_repo_module.models.role import Role +from db_repo_module.models.role_resource import RoleResource +from db_repo_module.models.session import Session +from db_repo_module.models.user import User +from db_repo_module.models.user_role import UserRole +import pytest +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from user_management_module.utils.user_utils import get_session_cache_key + + +async def create_session(test_session: AsyncSession, test_user_id, test_session_id): + user = User( + id=test_user_id, + email='test@example.com', + password='hashed_password', + first_name='Test', + last_name='User', + ) + + # Create a session in the database + db_session = Session( + id=test_session_id, user_id=test_user_id, device_info='test_device' + ) + 
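+    # 'test_session' is an async session factory fixture: open one short-lived
+    # session, stage the user and its session row together, and commit once so
+    # dependent tests start from a fully provisioned account.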
+ async with test_session() as session: + session.add(user) + session.add(db_session) + await session.commit() + + +async def setup_role_with_console_resource(test_session: AsyncSession, role_id: str): + async with test_session() as session: + # Create console resource + console_resource = Resource( + key='console_access', + value='true', + description='Console access resource', + scope=ResourceScope.CONSOLE, + ) + session.add(console_resource) + await session.flush() + + # Create role + role = Role(id=role_id, name='Test Role') + session.add(role) + await session.flush() + + # Link role with console resource + role_resource = RoleResource(role_id=role.id, resource_id=console_resource.id) + session.add(role_resource) + await session.commit() + + +@pytest.mark.asyncio +async def test_create_user_success( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + # Create test role with console resource + await create_session(test_session, test_user_id, test_session_id) + await setup_role_with_console_resource(test_session, 'test_role_id') + + new_user_data = { + 'email': 'test2@example.com', + 'password': 'Test@123', # Updated password with special character + 'first_name': 'Test', + 'last_name': 'User', + 'role_id': ['test_role_id'], + } + + response = test_client.post( + '/floware/v1/users', + json=new_user_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == 200 + # checking if the user is created in the database + async with test_session() as session: + user = await session.execute( + select(User).where(User.email == new_user_data['email']) + ) + assert user is not None + + +@pytest.mark.asyncio +async def test_send_reset_password_email_soft_deleted_user( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + """Test that soft deleted users cannot send reset password emails""" + # Create test user and session + await create_session(test_session, test_user_id, test_session_id) + + # Create a soft deleted user for password reset attempt + async with test_session() as session: + user = User( + email='deleted_reset@example.com', + password='hashedpassword', + first_name='Deleted', + last_name='User', + deleted=True, # User is soft deleted + ) + session.add(user) + await session.commit() + + response = test_client.post( + '/floware/v1/user/send-reset-password-email?email=deleted_reset@example.com', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 400 + assert 'No user found with this email ID' in response.json()['meta']['error'] + + +@pytest.mark.asyncio +async def test_create_user_duplicate_email( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + await create_session(test_session, test_user_id, test_session_id) + # Create existing user + async with test_session() as session: + user = User( + email='existing@example.com', + password='hashedpassword', + first_name='Existing', + last_name='User', + ) + session.add(user) + await session.commit() + + new_user_data = { + 'email': 'existing@example.com', + 'password': 'Test@123', + 'first_name': 'Test', + 'last_name': 'User', + 'role_id': ['test_role_id'], + } + + response = test_client.post( + '/floware/v1/users', + json=new_user_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 400 + + +@pytest.mark.asyncio +async def 
test_update_user_success( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, + mock_admin_false_functions, +): + await create_session(test_session, test_user_id, test_session_id) + # Create test user and role + async with test_session() as session: + user = User( + email='update_test@example.com', # Changed email to avoid conflict + password='hashedpassword', + first_name='Test', + last_name='User', + ) + session.add(user) + await session.flush() + user_id = str(user.id) # Get the ID before committing + + role = Role(id='new_role_id', name='New Role') + session.add(role) + await session.commit() + + update_data = { + 'user_id': user_id, # Use the stored ID + 'add_role_ids': ['new_role_id'], + # Omit delete_role_ids since it's optional and we don't want to delete any roles + } + + response = test_client.patch( + '/floware/v1/users', + json=update_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 200 + + # Verify the role assignment in the database + async with test_session() as session: + user_roles = await session.execute( + select(UserRole).where(UserRole.user_id == user_id) + ) + user_roles = user_roles.scalars().all() + assert len(user_roles) == 1 + assert user_roles[0].role_id == 'new_role_id' + + +@pytest.mark.asyncio +async def test_get_all_users( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + await create_session(test_session, test_user_id, test_session_id) + # Create test users + async with test_session() as session: + user1 = User( + email='user1@example.com', + password='hashedpassword', + first_name='User', + last_name='One', + ) + user2 = User( + email='user2@example.com', + password='hashedpassword', + first_name='User', + last_name='Two', + ) + session.add_all([user1, user2]) + await session.commit() + + response = test_client.get( + '/floware/v1/users', headers={'Authorization': f'Bearer {auth_token}'} + ) + assert response.status_code == 200 + assert len(response.json()['data']['users']) >= 2 + + +@pytest.mark.asyncio +async def test_delete_user_success( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + await create_session(test_session, test_user_id, test_session_id) + # Create test user + async with test_session() as session: + user = User( + email='delete@example.com', + password='hashedpassword', + first_name='Delete', + last_name='User', + ) + session.add(user) + await session.flush() + user_id = str(user.id) # Get the ID before committing + await session.commit() + + response = test_client.delete( + f'/floware/v1/users?id={user_id}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 200 + assert 'User deleted successfully' in response.json()['data']['message'] + # checking if the user is deleted from the database + async with test_session() as session: + user = await session.execute(select(User).where(User.id == user_id)) + user_obj = user.scalar_one() + assert user_obj.deleted is True + + roles_res = await session.execute( + select(UserRole).where(UserRole.user_id == user_id) + ) + assert len(roles_res.scalars().all()) == 0 + + +@pytest.mark.asyncio +async def test_create_user_reactivates_soft_deleted_user( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + """Test that creating a user with email of soft-deleted 
user reactivates the account""" + await create_session(test_session, test_user_id, test_session_id) + await setup_role_with_console_resource(test_session, 'test_role_id') + + # Create a soft-deleted user + soft_deleted_user_id = str(uuid.uuid4()) + async with test_session() as session: + # Create the user first + user = User( + id=soft_deleted_user_id, + email='softdeleted@example.com', + password='old_hashed_password', + first_name='Old', + last_name='Name', + deleted=True, # Soft deleted + ) + session.add(user) + await session.flush() + + # Add old role (will be replaced) + old_role = Role(id='old_role_id', name='Old Role') + session.add(old_role) + await session.flush() + + # Create old role-resource mapping + old_console_resource = Resource( + key='old_console_access', + value='true', + description='Old console access', + scope=ResourceScope.CONSOLE, + ) + session.add(old_console_resource) + await session.flush() + + old_role_resource = RoleResource( + role_id='old_role_id', resource_id=old_console_resource.id + ) + session.add(old_role_resource) + + # No user roles initially (soft delete removes them) + await session.commit() + + # Try to create a "new" user with the same email + new_user_data = { + 'email': 'softdeleted@example.com', # Same email as soft-deleted user + 'password': 'NewPassword@123', + 'first_name': 'New', + 'last_name': 'Name', + 'role_id': ['test_role_id'], # New role + } + + response = test_client.post( + '/floware/v1/users', + json=new_user_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + # Should succeed with reactivation + assert response.status_code == 200 + response_data = response.json() + assert 'User account reactivated successfully' in response_data['data']['message'] + assert response_data['data']['user_id'] == soft_deleted_user_id + + # Verify user is reactivated in database + async with test_session() as session: + # Check user is no longer deleted + user_result = await session.execute( + select(User).where(User.id == soft_deleted_user_id) + ) + reactivated_user = user_result.scalar_one() + + assert reactivated_user.deleted is False + assert reactivated_user.first_name == 'New' # Updated + assert reactivated_user.last_name == 'Name' # Updated + assert reactivated_user.email == 'softdeleted@example.com' # Same + assert reactivated_user.failed_attempts == 0 # Reset + assert reactivated_user.locked_until is None # Reset + + # Check new roles are assigned + user_roles_result = await session.execute( + select(UserRole).where(UserRole.user_id == soft_deleted_user_id) + ) + user_roles = user_roles_result.scalars().all() + + # Should have the new role assigned + role_ids = [ur.role_id for ur in user_roles] + assert 'test_role_id' in role_ids + + +@pytest.mark.asyncio +async def test_create_user_reactivation_validates_roles( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + """Test that user reactivation validates roles properly""" + await create_session(test_session, test_user_id, test_session_id) + + # Create a soft-deleted user + soft_deleted_user_id = str(uuid.uuid4()) + async with test_session() as session: + user = User( + id=soft_deleted_user_id, + email='rolevalidation@example.com', + password='old_password', + first_name='Test', + last_name='User', + deleted=True, + ) + session.add(user) + await session.commit() + + # Try to reactivate with invalid role + new_user_data = { + 'email': 'rolevalidation@example.com', + 'password': 'NewPassword@123', + 'first_name': 
'New', + 'last_name': 'User', + 'role_id': ['nonexistent_role_id'], # Invalid role + } + + response = test_client.post( + '/floware/v1/users', + json=new_user_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + # Should fail with invalid role error - FIXED + assert response.status_code == 400 + assert 'Invalid role IDs' in response.json()['meta']['error'] + + # Verify user is still soft-deleted + async with test_session() as session: + user_result = await session.execute( + select(User).where(User.id == soft_deleted_user_id) + ) + user = user_result.scalar_one() + assert user.deleted is True # Still deleted + + +@pytest.mark.asyncio +async def test_create_user_reactivation_requires_console_resource( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + """Test that user reactivation requires console resource""" + await create_session(test_session, test_user_id, test_session_id) + + # Create a role without console resource + async with test_session() as session: + role_without_console = Role(id='no_console_role', name='No Console Role') + session.add(role_without_console) + + # Create non-console resource + data_resource = Resource( + key='data_access', + value='true', + description='Data access only', + scope=ResourceScope.DATA, + ) + session.add(data_resource) + await session.flush() + + role_resource = RoleResource( + role_id='no_console_role', resource_id=data_resource.id + ) + session.add(role_resource) + + # Create soft-deleted user + soft_deleted_user_id = str(uuid.uuid4()) + user = User( + id=soft_deleted_user_id, + email='noconsole@example.com', + password='old_password', + first_name='Test', + last_name='User', + deleted=True, + ) + session.add(user) + await session.commit() + + # Try to reactivate with role that has no console resource + new_user_data = { + 'email': 'noconsole@example.com', + 'password': 'NewPassword@123', + 'first_name': 'New', + 'last_name': 'User', + 'role_id': ['no_console_role'], + } + + response = test_client.post( + '/floware/v1/users', + json=new_user_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + # Should fail with console resource requirement + assert response.status_code == 400 + assert 'console resource is mandatory' in response.json()['meta']['error'] + + +@pytest.mark.asyncio +async def test_create_user_active_user_blocks_creation( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + """Test that active user with same email blocks new user creation""" + await create_session(test_session, test_user_id, test_session_id) + await setup_role_with_console_resource(test_session, 'test_role_id') + + # Create an active user + async with test_session() as session: + active_user = User( + email='active@example.com', + password='password', + first_name='Active', + last_name='User', + deleted=False, # Active user + ) + session.add(active_user) + await session.commit() + + # Try to create user with same email + new_user_data = { + 'email': 'active@example.com', + 'password': 'NewPassword@123', + 'first_name': 'New', + 'last_name': 'User', + 'role_id': ['test_role_id'], + } + + response = test_client.post( + '/floware/v1/users', + json=new_user_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + # Should fail with user already exists + assert response.status_code == 400 + assert 'User with the same email already exists' in response.json()['meta']['error'] + + +@pytest.mark.asyncio 
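+# Soft-deleted users must be rejected at authentication even when credentials,
+# roles, and console resources are otherwise valid; only the deleted flag differs.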
+async def test_authenticate_deleted_user( + test_client, test_session: AsyncSession, test_user_id +): + # Setup user + role + console resource + from user_management_module.utils.password_utils import hash_password + + hashed_password = hash_password('test_password') + + async with test_session() as session: + user = User( + id=test_user_id, + email='deleted@example.com', + password=hashed_password, + first_name='Del', + last_name='User', + deleted=True, + ) + session.add(user) + await session.flush() + + role = Role(id=str(uuid.uuid4()), name='Role') + session.add(role) + await session.flush() + + resource = Resource( + id=str(uuid.uuid4()), + key='console_resource', + value='x', + description='desc', + scope=ResourceScope.CONSOLE, + ) + session.add(resource) + await session.flush() + + rr = RoleResource(role_id=role.id, resource_id=resource.id) + session.add(rr) + await session.flush() + + ur = UserRole(user_id=test_user_id, role_id=role.id) + session.add(ur) + await session.commit() + + resp = test_client.post( + '/floware/v1/authenticate', + json={'email': 'deleted@example.com', 'password': 'test_password'}, + ) + assert resp.status_code == 403 + + +@pytest.mark.asyncio +async def test_delete_user_invalidates_all_sessions_db_and_cache( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, + setup_containers, +): + # Create a separate user to delete + async with test_session() as session: + user = User( + email='invalidate@example.com', + password='hashed', + first_name='Inv', + last_name='User', + ) + session.add(user) + await session.flush() + target_user_uuid = user.id # capture before any commit + target_user_id = str(target_user_uuid) + + # sessions: capture ids upfront + s1_id = str(uuid.uuid4()) + s2_id = str(uuid.uuid4()) + s1 = Session(id=s1_id, user_id=target_user_uuid, device_info='dev1') + s2 = Session(id=s2_id, user_id=target_user_uuid, device_info='dev2') + session.add_all([s1, s2]) + + # role and mapping BEFORE commit (use the captured user id, not user.id) + role = Role(id='invalidate_role', name='Invalidate Role') + session.add(role) + await session.flush() + session.add(UserRole(user_id=target_user_uuid, role_id=role.id)) + + await session.commit() + + # Delete the user (soft delete) + resp_del = test_client.delete( + f'/floware/v1/users?id={target_user_id}', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert resp_del.status_code == 200 + + # Sessions should be removed from DB + async with test_session() as session: + result = await session.execute( + select(Session).where(Session.user_id == target_user_uuid) + ) + assert result.scalars().all() == [] + + # Session cache keys should be removed for both old sessions + _, _, user_container = setup_containers + cache_manager = user_container.cache_manager() + cache_manager.remove.assert_any_call(get_session_cache_key(s1_id)) + cache_manager.remove.assert_any_call(get_session_cache_key(s2_id)) + + cache_manager.remove.assert_any_call(target_user_id) + + +@pytest.mark.asyncio +async def test_authenticate_replaces_existing_sessions( + test_client, test_session: AsyncSession, test_user_id, setup_containers +): + from user_management_module.utils.password_utils import hash_password + from uuid import uuid4 + + # Prepare user with role + console resource + # 1) user + hashed = hash_password('pw') + async with test_session() as session: + user = User( + id=test_user_id, 
email='x@example.com', + password=hashed, + first_name='X', + last_name='Y', + ) + session.add(user) + await session.flush() + # 2) role + console resource + mapping + role = Role(id=str(uuid4()), name='Role') + res = Resource( + id=str(uuid4()), key='console', value='true', scope=ResourceScope.CONSOLE + ) + session.add_all([role, res]) + await session.flush() + session.add(RoleResource(role_id=role.id, resource_id=res.id)) + session.add(UserRole(user_id=user.id, role_id=role.id)) + # 3) pre-existing sessions + s1_id, s2_id = str(uuid4()), str(uuid4()) + session.add_all( + [ + Session(id=s1_id, user_id=user.id, device_info='dev1'), + Session(id=s2_id, user_id=user.id, device_info='dev2'), + ] + ) + await session.commit() + + # Login: should remove s1/s2 and create a fresh session + resp = test_client.post( + '/floware/v1/authenticate', json={'email': 'x@example.com', 'password': 'pw'} + ) + assert resp.status_code == 200 + + # DB sessions for user should be 1 (the newly created one) + async with test_session() as session: + rows = ( + ( + await session.execute( + select(Session).where(Session.user_id == test_user_id) + ) + ) + .scalars() + .all() + ) + assert len(rows) == 1 + + # Cache invalidations were called for both old sessions + _, _, user_container = setup_containers + cache_manager = user_container.cache_manager() + cache_manager.remove.assert_any_call(get_session_cache_key(s1_id)) + cache_manager.remove.assert_any_call(get_session_cache_key(s2_id)) + + +@pytest.mark.asyncio +async def test_authenticate_enabled_user_without_roles_fails( + test_client, test_session: AsyncSession, test_user_id +): + from user_management_module.utils.password_utils import hash_password + + hashed_password = hash_password('test_password') + + async with test_session() as session: + # user enabled + user = User( + id=test_user_id, + email='norole@example.com', + password=hashed_password, + first_name='No', + last_name='Role', + deleted=False, + ) + session.add(user) + await session.flush() + + # create role + console resource and mapping, then remove the user-role mapping to simulate a user whose roles were deleted + role = Role(id=str(uuid.uuid4()), name='Role') + session.add(role) + await session.flush() + + resource = Resource( + id=str(uuid.uuid4()), + key='console_resource', + value='x', + description='desc', + scope=ResourceScope.CONSOLE, + ) + session.add(resource) + await session.flush() + + rr = RoleResource(role_id=role.id, resource_id=resource.id) + session.add(rr) + await session.flush() + + ur = UserRole(user_id=test_user_id, role_id=role.id) + session.add(ur) + await session.flush() + + # Remove user roles + await session.delete(ur) + await session.commit() + + resp = test_client.post( + '/floware/v1/authenticate', + json={'email': 'norole@example.com', 'password': 'test_password'}, + ) + # Should fail because there is no console role now + assert resp.status_code == 403 + + +@pytest.mark.asyncio +async def test_send_reset_password_email( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + # Create test user and session + await create_session(test_session, test_user_id, test_session_id) + + # Create another test user for password reset + async with test_session() as session: + user = User( + email='reset@example.com', + password='hashedpassword', + first_name='Reset', + last_name='User', + ) + session.add(user) + await session.commit() + + response = test_client.post( + '/floware/v1/user/send-reset-password-email?email=reset@example.com', 
headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 200 + assert 'password reset link has been sent' in response.json()['data']['message'] + + +@pytest.mark.asyncio +async def test_reset_password( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + # Create test user + await create_session(test_session, test_user_id, test_session_id) + + async with test_session() as session: + user = User( + email='reset@example.com', + password='oldpassword', + first_name='Reset', + last_name='User', + ) + session.add(user) + await session.commit() + + reset_data = { + 'secret_token': 'mock_token', # Use the mock token that matches our mock setup + 'new_password': 'Test@123', # Updated password with special character + } + + response = test_client.post( + '/floware/v1/user/reset-password', + json=reset_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 200 + assert ( + 'password has been updated successfully' in response.json()['data']['message'] + ) + + +@pytest.mark.asyncio +async def test_whoami_endpoint( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + await create_session(test_session, test_user_id, test_session_id) + response = test_client.get( + '/floware/v1/whoami', headers={'Authorization': f'Bearer {auth_token}'} + ) + assert response.status_code == 200 + assert 'user' in response.json()['data'] + + +@pytest.mark.asyncio +async def test_update_user_invalid_role( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + await create_session(test_session, test_user_id, test_session_id) + update_data = { + 'user_id': test_user_id, # Use the actual UUID from the fixture + 'add_role_ids': ['invalid_role_id'], + } + + response = test_client.patch( + '/floware/v1/users', + json=update_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == 400 + assert 'Invalid role IDs' in response.json()['meta']['error'] + + +# Test that a non-admin user is denied user creation +@pytest.mark.asyncio +async def test_non_admin_user_create_user( + test_client, + mock_admin_false_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + await create_session(test_session, test_user_id, test_session_id) + new_user_data = { + 'email': 'test2@example.com', + 'password': 'Test@123', # Updated password with special character + 'first_name': 'Test', + 'last_name': 'User', + 'role_id': ['test_role_id'], + } + response = test_client.post( + '/floware/v1/users', + json=new_user_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 401 + + +# Checking that when an admin creates a user, all requested roles are assigned to the user +@pytest.mark.asyncio +async def test_admin_user_create_user( + test_client, + test_session, + test_user_id, + test_session_id, + auth_token, + mocking_user_controller_is_admin, +): + await create_session(test_session, test_user_id, test_session_id) + new_user_data = { + 'email': 'test2@example.com', + 'password': 'Test@123', # Updated password with special character + 'first_name': 'Test', + 'last_name': 'User', + 'role_id': ['test_role_id', 'test_role_id2'], + } + + async with test_session() as session: + # Create roles + role1 = Role(id='test_role_id', name='Test Role') + role2 = Role(id='test_role_id2', name='Test Role 2') + session.add_all([role1, 
role2]) + await session.flush() + + # Create console resources + console_resource1 = Resource( + id=str(uuid.uuid4()), + key='console_access', + value='true', + description='Console access resource', + scope=ResourceScope.CONSOLE, + ) + console_resource2 = Resource( + id=str(uuid.uuid4()), + key='console_manage', + value='true', + description='Console management resource', + scope=ResourceScope.CONSOLE, + ) + session.add_all([console_resource1, console_resource2]) + await session.flush() + + # Link roles with console resources + role_resource1 = RoleResource( + role_id='test_role_id', resource_id=console_resource1.id + ) + role_resource2 = RoleResource( + role_id='test_role_id2', resource_id=console_resource2.id + ) + session.add_all([role_resource1, role_resource2]) + await session.commit() + + response = test_client.post( + '/floware/v1/users', + json=new_user_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 200 + # checking if the role is assigned to the user in user_role table + new_user_id = response.json()['data']['user_id'] + async with test_session() as session: + user_role = await session.execute( + select(UserRole).where(UserRole.user_id == new_user_id) + ) + user_role = user_role.scalars().all() + assert len(user_role) == 2 + + +# Checking that an admin can create a non-admin user with a single assigned role +@pytest.mark.asyncio +async def test_admin_user_create_non_admin_user( + test_client, + test_session, + test_user_id, + test_session_id, + auth_token, + mocking_user_controller_is_admin, + mocking_user_controller_get_current_user, +): + await create_session(test_session, test_user_id, test_session_id) + new_user_data = { + 'email': 'test2@example.com', + 'password': 'Test@123', # Updated password with special character + 'first_name': 'Test', + 'last_name': 'User', + 'role_id': ['test_role_id'], + } + + async with test_session() as session: + # Create roles + role1 = Role(id='test_role_id', name='Test Role') + role2 = Role(id='test_role_id2', name='Test Role 2') + session.add_all([role1, role2]) + await session.flush() + + # Create console resources + console_resource1 = Resource( + id=str(uuid.uuid4()), + key='console_access', + value='true', + description='Console access resource', + scope=ResourceScope.CONSOLE, + ) + console_resource2 = Resource( + id=str(uuid.uuid4()), + key='console_manage', + value='true', + description='Console management resource', + scope=ResourceScope.CONSOLE, + ) + session.add_all([console_resource1, console_resource2]) + await session.flush() + + # Link roles with console resources + role_resource1 = RoleResource( + role_id='test_role_id', resource_id=console_resource1.id + ) + role_resource2 = RoleResource( + role_id='test_role_id2', resource_id=console_resource2.id + ) + session.add_all([role_resource1, role_resource2]) + await session.commit() + + response = test_client.post( + '/floware/v1/users', + json=new_user_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 200 + # checking if the role is assigned to the user in user_role table + new_user_id = response.json()['data']['user_id'] + async with test_session() as session: + user_role = await session.execute( + select(UserRole).where(UserRole.user_id == new_user_id) + ) + user_role = user_role.scalars().all() + assert len(user_role) == 1 + + +@pytest.mark.asyncio +async def test_admin_user_create_non_admin_user_with_invalid_role( + test_client, + test_session, + test_user_id, + test_session_id, + 
auth_token, + mocking_user_controller_is_admin, + mocking_user_controller_get_current_user, +): + await create_session(test_session, test_user_id, test_session_id) + new_user_data = { + 'email': 'test2@example.com', + 'password': 'Test@123', # Updated password with special character + 'first_name': 'Test', + 'last_name': 'User', + 'role_id': ['123213123'], + } + + async with test_session() as session: + # Create roles + role1 = Role(id='test_role_id', name='Test Role') + role2 = Role(id='test_role_id2', name='Test Role 2') + session.add_all([role1, role2]) + await session.flush() + + # Create console resources + console_resource1 = Resource( + id=str(uuid.uuid4()), + key='console_access', + value='true', + description='Console access resource', + scope=ResourceScope.CONSOLE, + ) + console_resource2 = Resource( + id=str(uuid.uuid4()), + key='console_manage', + value='true', + description='Console management resource', + scope=ResourceScope.CONSOLE, + ) + session.add_all([console_resource1, console_resource2]) + await session.flush() + + # Link roles with console resources + role_resource1 = RoleResource( + role_id='test_role_id', resource_id=console_resource1.id + ) + role_resource2 = RoleResource( + role_id='test_role_id2', resource_id=console_resource2.id + ) + session.add_all([role_resource1, role_resource2]) + await session.commit() + + response = test_client.post( + '/floware/v1/users', + json=new_user_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 400 + + +@pytest.mark.asyncio +async def test_admin_user_create_non_admin_user_with_empty_role( + test_client, + test_session, + test_user_id, + test_session_id, + auth_token, + mocking_user_controller_is_admin, + mocking_user_controller_get_current_user, +): + await create_session(test_session, test_user_id, test_session_id) + new_user_data = { + 'email': 'test2@example.com', + 'password': 'Test@123', # Updated password with special character + 'first_name': 'Test', + 'last_name': 'User', + 'role_id': [], + } + + response = test_client.post( + '/floware/v1/users', + json=new_user_data, + headers={'Authorization': f'Bearer {auth_token}'}, + ) + assert response.status_code == 422 + + +@pytest.mark.asyncio +async def test_unblock_user_success( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + """Test successful user unblock by admin""" + await create_session(test_session, test_user_id, test_session_id) + + current_time = datetime.now(timezone.utc) + locked_until = current_time + timedelta(hours=1) # User is locked + + # Create a locked user + locked_user_id = None + async with test_session() as session: + locked_user = User( + email='locked_user@example.com', + password='hashedpassword', + first_name='Locked', + last_name='User', + failed_attempts=3, + locked_until=locked_until, + last_failed_attempt=current_time, + ) + session.add(locked_user) + await session.flush() + locked_user_id = str(locked_user.id) + await session.commit() + + # Admin unblocks the user + response = test_client.patch( + f'/floware/v1/users/{locked_user_id}/unblock', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == 200 + assert 'successfully unblocked' in response.json()['data']['message'] + assert locked_user_id in response.json()['data']['message'] + + # Verify user is actually unblocked in database + async with test_session() as session: + unblocked_user = await session.execute( + select(User).where(User.email 
== 'locked_user@example.com') + ) + unblocked_user = unblocked_user.scalars().first() + assert unblocked_user.failed_attempts == 0 + assert unblocked_user.locked_until is None + assert unblocked_user.last_failed_attempt is None + + +@pytest.mark.asyncio +async def test_unblock_nonexistent_user( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + """Test unblock attempt for non-existent user""" + await create_session(test_session, test_user_id, test_session_id) + + # Use a fake UUID for non-existent user + fake_user_id = str(uuid.uuid4()) + + response = test_client.patch( + f'/floware/v1/users/{fake_user_id}/unblock', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == 404 + assert 'not found' in response.json()['meta']['error'] + assert fake_user_id in response.json()['meta']['error'] + + +@pytest.mark.asyncio +async def test_unblock_user_non_admin_access_denied( + test_client, + mock_admin_false_functions, # This makes the user non-admin + test_session, + test_user_id, + test_session_id, + auth_token, +): + """Test unblock access denied for non-admin user""" + await create_session(test_session, test_user_id, test_session_id) + + # Create a locked user + current_time = datetime.now(timezone.utc) + locked_until = current_time + timedelta(hours=1) + + locked_user_id = None + async with test_session() as session: + locked_user = User( + email='locked_user@example.com', + password='hashedpassword', + first_name='Locked', + last_name='User', + failed_attempts=3, + locked_until=locked_until, + last_failed_attempt=current_time, + ) + session.add(locked_user) + await session.flush() + locked_user_id = str(locked_user.id) + await session.commit() + + # Non-admin user attempts to unblock + response = test_client.patch( + f'/floware/v1/users/{locked_user_id}/unblock', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + assert response.status_code == 401 + assert 'Access denied' in response.json()['meta']['error'] + + # Verify user is still locked in database + async with test_session() as session: + still_locked_user = await session.execute( + select(User).where(User.email == 'locked_user@example.com') + ) + still_locked_user = still_locked_user.scalars().first() + assert still_locked_user.failed_attempts == 3 + assert still_locked_user.locked_until is not None + + +@pytest.mark.asyncio +async def test_unblock_already_unlocked_user( + test_client, + mock_auth_admin_user_functions, + test_session, + test_user_id, + test_session_id, + auth_token, +): + """Test unblock operation on a user that is already unlocked""" + await create_session(test_session, test_user_id, test_session_id) + + # Create an unlocked user (no lockout fields set) + unlocked_user_id = None + async with test_session() as session: + unlocked_user = User( + email='unlocked_user@example.com', + password='hashedpassword', + first_name='Unlocked', + last_name='User', + failed_attempts=0, # No failed attempts + locked_until=None, # Not locked + last_failed_attempt=None, # No previous failed attempts + ) + session.add(unlocked_user) + await session.flush() + unlocked_user_id = str(unlocked_user.id) + await session.commit() + + # Admin attempts to unblock already unlocked user + response = test_client.patch( + f'/floware/v1/users/{unlocked_user_id}/unblock', + headers={'Authorization': f'Bearer {auth_token}'}, + ) + + # Should still return success (idempotent operation) + assert response.status_code == 200 + assert 
'successfully unblocked' in response.json()['data']['message'] diff --git a/wavefront/server/modules/user_management_module/user_management_module/authorization/require_auth.py b/wavefront/server/modules/user_management_module/user_management_module/authorization/require_auth.py new file mode 100644 index 00000000..969046bc --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/authorization/require_auth.py @@ -0,0 +1,563 @@ +from dataclasses import dataclass +import json +import hashlib +import hmac +import time +import os +import re + +from auth_module.auth_container import AuthContainer +from auth_module.services.token_service import TokenService +from common_module.common_container import CommonContainer +from common_module.log.logger import logger +from common_module.middleware.request_id_middleware import get_current_request_id +from common_module.response_formatter import ResponseFormatter +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.models.session import Session +from db_repo_module.models.auth_secrets import AuthSecrets +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import Request +from fastapi import status +from user_management_module.constants.auth import SERVICE_AUTH_ROLE_ID, RootfloHeaders +from fastapi.responses import JSONResponse +import jwt +from starlette.middleware.base import BaseHTTPMiddleware +from user_management_module.user_container import UserContainer +from user_management_module.utils.user_utils import check_is_admin +from user_management_module.utils.user_utils import get_session_cache_key + +optional_auth_apis = [ + '/floware/v1/health', + '/floware/v1/authenticate', + '/', + '/google/login', + '/google/login/callback', + '/azure/login', + '/floware/oauth/azure/callback', + '/docs', + '/openapi.json', + '/floware/v1/user/send-reset-password-email', + '/floware/v1/user/reset-password', + '/floware/v1/data-sources/outlook/webhook/email_received', + '/v1/_metrics', + '/floware/v1/plugin-auth/authenticate', + '/floware/v1/oauth/google/callback', + '/floware/v1/oauth/microsoft/callback', + '/floware/v1/plugin-auth/oauth/init', + '/floware/v1/settings/config', +] + +hmac_routes = os.getenv('HMAC_AUTH_ROUTES', '').split(',') + +floware_jwt_audience = os.getenv('FLOWARE_JWT_AUDIENCE', '') + +floware_jwt_validation_issuer = os.getenv('FLOWARE_JWT_VALIDATION_ISSUER', '').split( + ',' +) + +console_token_prefix = os.getenv('CONSOLE_TOKEN_PREFIX', 'fc_') +passthrough_secret = os.getenv('PASSTHROUGH_SECRET') +environment = os.getenv('APP_ENV', 'dev') + +required_hmac_apis = ['/floware/v1/image/analyse', *hmac_routes] + + +admin_apis = [ + '/floware/v1/agent-management', + '/floware/v1/workflow-management', +] + + +def matches_dynamic_route(path: str, route_pattern: str) -> bool: + """ + Check if a path matches a dynamic route pattern. 
+ + Args: + path: The actual request path (e.g., '/floware/v1/workflow-runs/123') + route_pattern: The route pattern with placeholders (e.g., '/floware/v1/workflow-runs/{workflow_run_id}') + + Returns: + bool: True if the path matches the pattern, False otherwise + """ + # Convert route pattern to regex + # First replace {param} with a placeholder, then escape, then replace placeholder + regex_pattern = route_pattern + # Replace {param} with a temporary placeholder + regex_pattern = re.sub(r'\{[^}]+\}', 'PLACEHOLDER', regex_pattern) + # Escape the pattern + regex_pattern = re.escape(regex_pattern) + # Replace the placeholder with the actual regex pattern + regex_pattern = regex_pattern.replace('PLACEHOLDER', r'[^/]+') + regex_pattern = f'^{regex_pattern}$' + + return bool(re.match(regex_pattern, path)) + + +async def validate_service_auth( + request: Request, + auth_secrets_repository: SQLAlchemyRepository[AuthSecrets], + token_service: TokenService, +) -> bool: + """Validate service-to-service authentication using Client-Key + JWT""" + try: + # Get Client-Key header + client_key = request.headers.get(RootfloHeaders.CLIENT_KEY) + if not client_key: + logger.warning('Missing Client-Key header for service auth') + return False + + # Get Authorization header + auth_header = request.headers.get('Authorization') + if not auth_header or not auth_header.startswith('Bearer '): + logger.warning('Missing or invalid Authorization header for service auth') + return False + + token = auth_header.split(' ')[1] + + # Look up client secret using client key + auth_secret = await auth_secrets_repository.find_one(client_key=client_key) + if not auth_secret: + logger.warning(f'Invalid client_key for service auth: {client_key}') + return False + + # Remove console prefix if present (fc_) + if token.startswith(console_token_prefix): + token = token[len(console_token_prefix) :] + + # Validate JWT using client secret (HS256 algorithm for service tokens) + try: + decoded = jwt.decode( + token, + auth_secret.client_secret, + algorithms=['HS256'], + issuer=floware_jwt_validation_issuer or '', + audience=floware_jwt_audience, + ) + + # For service tokens, we skip session validation + # Create a minimal session object for compatibility + request.state.session = UserSession( + role_id=decoded.get('role_id', 'service'), + user_id=decoded.get('user_id', 'service'), + session_id=decoded.get('session_id', 'service-token'), + ) + + logger.info('Valid service authentication') + return True + + except jwt.InvalidTokenError as e: + logger.warning(f'Invalid service token: {str(e)}') + return False + + except Exception as e: + logger.error(f'Error validating service authentication: {str(e)}') + return False + + +async def validate_hmac_signature( + request: Request, + auth_secrets_repository: SQLAlchemyRepository[AuthSecrets], +) -> bool: + """Validate HMAC signature from request headers.""" + try: + # Get required headers + client_key = request.headers.get(RootfloHeaders.CLIENT_KEY) + signature = request.headers.get(RootfloHeaders.SIGNATURE) + timestamp = request.headers.get(RootfloHeaders.TIMESTAMP) + + if not all([client_key, signature, timestamp]): + request_id = getattr(request.state, 'request_id', get_current_request_id()) + logger.warning( + f'Missing HMAC headers: client_key={bool(client_key)}, signature={bool(signature)}, timestamp={bool(timestamp)} [Request ID: {request_id}]' + ) + return False + + # Validate timestamp to prevent replay attacks (5 minute window) + try: + request_timestamp = int(timestamp) + 
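+            # The replay check below assumes the client signed
+            #   message = f'{nonce}:{timestamp}'
+            # with HMAC-SHA256 over its client_secret, e.g.:
+            #   sig = hmac.new(secret.encode(), message.encode(), hashlib.sha256).hexdigest()
+            # sending sig and timestamp as headers, with the nonce (>= 32 chars)
+            # embedded in the JSON body.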
current_timestamp = int(time.time()) + if abs(current_timestamp - request_timestamp) > 300: # 5 minutes + request_id = getattr( + request.state, 'request_id', get_current_request_id() + ) + logger.warning( + f'Request timestamp too old or in future: {timestamp}, current: {current_timestamp} [Request ID: {request_id}]' + ) + return False + except ValueError: + request_id = getattr(request.state, 'request_id', get_current_request_id()) + logger.warning( + f'Invalid timestamp format: {timestamp} [Request ID: {request_id}]' + ) + return False + + # Find the client secret + auth_secret = await auth_secrets_repository.find_one(client_key=client_key) + if not auth_secret: + request_id = getattr(request.state, 'request_id', get_current_request_id()) + logger.warning( + f'Invalid client_key: {client_key} [Request ID: {request_id}]' + ) + return False + + body = await request.body() + + # Parse JSON body to extract nonce + try: + parsed_body = json.loads(body.decode('utf-8')) + nonce = parsed_body.get('nonce') + if not nonce: + request_id = getattr( + request.state, 'request_id', get_current_request_id() + ) + logger.warning( + f"Missing 'nonce' field in request body [Request ID: {request_id}]" + ) + return False + if len(nonce) < 32: + request_id = getattr( + request.state, 'request_id', get_current_request_id() + ) + logger.warning( + f"Minimum 'nonce' length required is 32 [Request ID: {request_id}]" + ) + return False + except (json.JSONDecodeError, UnicodeDecodeError): + request_id = getattr(request.state, 'request_id', get_current_request_id()) + logger.warning(f'Invalid JSON in request body [Request ID: {request_id}]') + return False + + # Create the message to sign: nonce:timestamp + message_to_sign = f'{nonce}:{timestamp}' + + # Generate expected signature + expected_signature = hmac.new( + auth_secret.client_secret.encode('utf-8'), + message_to_sign.encode('utf-8'), + hashlib.sha256, + ).hexdigest() + + # Compare signatures + if not hmac.compare_digest(signature, expected_signature): + request_id = getattr(request.state, 'request_id', get_current_request_id()) + logger.warning(f'Invalid HMAC signature [Request ID: {request_id}]') + return False + + request_id = getattr(request.state, 'request_id', get_current_request_id()) + logger.info(f'Valid HMAC signature [Request ID: {request_id}]') + return True + + except Exception as e: + request_id = getattr(request.state, 'request_id', get_current_request_id()) + logger.error( + f'Error validating HMAC signature: {str(e)} [Request ID: {request_id}]' + ) + return False + + +async def validate_mtls_auth(request: Request) -> bool: + """Validate mTLS authentication using X-Forwarded-Client-Cert header""" + try: + xfcc = request.headers.get('X-Forwarded-Client-Cert') + if not xfcc: + return False + + # Extract SPIFFE ID from URI field + # Format: Hash=...;URI=spiffe://...;... + match = re.search(r'URI=(spiffe://[^;,]+)', xfcc) + if match: + principal = match.group(1) + if not principal.startswith( + 'spiffe://cluster.local/ns/client-applications' + ): + logger.error(f'Invalid mTLS authentication. Principal: {principal}') + return False + + # Create a service session + request.state.session = UserSession( + role_id=SERVICE_AUTH_ROLE_ID, + user_id='service', + session_id='service-token', + ) + + request_id = getattr(request.state, 'request_id', get_current_request_id()) + logger.info( + f'Valid mTLS authentication. 
Principal: {principal} [Request ID: {request_id}]' + ) + return True + + request_id = getattr(request.state, 'request_id', get_current_request_id()) + logger.warning( + f'mTLS header present but no valid URI found: {xfcc} [Request ID: {request_id}]' + ) + return False + + except Exception as e: + request_id = getattr(request.state, 'request_id', get_current_request_id()) + logger.error( + f'Error validating mTLS authentication: {str(e)} [Request ID: {request_id}]' + ) + return False + + +@dataclass +class UserSession: + role_id: str + user_id: str + session_id: str + + +class RequireAuthMiddleware(BaseHTTPMiddleware): + @inject + async def dispatch( + self, + request: Request, + call_next, + token_service: TokenService = Provide[AuthContainer.token_service], + response_formatter: ResponseFormatter = Provide[ + CommonContainer.response_formatter + ], + session_repository: SQLAlchemyRepository[Session] = Provide[ + UserContainer.session_repository + ], + cache_manager: CacheManager = Provide[UserContainer.cache_manager], + auth_secrets_repository: SQLAlchemyRepository[AuthSecrets] = Provide[ + UserContainer.auth_secrets_repository + ], + ): + try: + if request.method == 'OPTIONS': + return await call_next(request) + + # Check if this endpoint requires HMAC validation (skip JWT validation then) + if request.url.path in required_hmac_apis: + if not await validate_hmac_signature(request, auth_secrets_repository): + request_id = getattr( + request.state, 'request_id', get_current_request_id() + ) + logger.error( + f'HMAC validation failed for {request.url.path} [Request ID: {request_id}]' + ) + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + 'Invalid HMAC signature' + ), + ) + # Check for service-to-service authentication (Client-Key header + JWT) + elif request.headers.get(RootfloHeaders.CLIENT_KEY): + if not await validate_service_auth( + request, auth_secrets_repository, token_service + ): + request_id = getattr( + request.state, 'request_id', get_current_request_id() + ) + logger.error( + f'Service authentication failed for {request.url.path} [Request ID: {request_id}]' + ) + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Invalid service authentication' + ), + ) + else: # Do the JWT validation or passthrough + authorization = request.headers.get('Authorization') + + token = None + if authorization and authorization.startswith('Bearer '): + token = authorization.split(' ')[1] + + # Skip authentication for optional APIs + if request.url.path in optional_auth_apis: + return await call_next(request) + + # For non-production environments: Check passthrough authentication globally + if environment != 'production' and request.headers.get( + RootfloHeaders.PASSTHROUGH + ): + passthrough = request.headers.get(RootfloHeaders.PASSTHROUGH) + logger.info(f'PASSTHROUGH header present: {passthrough}') + + if not passthrough_secret: + request_id = getattr( + request.state, 'request_id', get_current_request_id() + ) + logger.error( + f'PASSTHROUGH_SECRET environment variable not set [Request ID: {request_id}]' + ) + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + error='passthrough is not configured' + ), + ) + + if passthrough != passthrough_secret: + request_id = getattr( + request.state, 'request_id', get_current_request_id() + ) + logger.error( + f'Invalid passthrough secret provided [Request ID: 
{request_id}]' + ) + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Invalid passthrough secret' + ), + ) + + # Create a service session for passthrough auth + request.state.session = UserSession( + role_id=SERVICE_AUTH_ROLE_ID, + user_id='passthrough', + session_id='passthrough-token', + ) + return await call_next(request) + + # Check for mTLS authentication if no token is present + if request.headers.get('X-Forwarded-Client-Cert'): + if await validate_mtls_auth(request): + return await call_next(request) + + if not token: + request_id = getattr( + request.state, 'request_id', get_current_request_id() + ) + logger.error( + f'Token missing in request for {request.url.path} [Request ID: {request_id}]' + ) + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Token missing in request' + ), + ) + decoded = token_service.decode_token(token) + if 'session_id' not in decoded: + request_id = getattr( + request.state, 'request_id', get_current_request_id() + ) + logger.error( + f'Invalid token: missing session_id [Request ID: {request_id}]' + ) + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Invalid token: session not found' + ), + ) + + # Try to get session from cache + session_cache_key = get_session_cache_key(decoded['session_id']) + cached_session = cache_manager.get_str(session_cache_key) + if cached_session: + try: + session_data = json.loads(cached_session) + if session_data.get('user_id') != decoded['user_id']: + request_id = getattr( + request.state, 'request_id', get_current_request_id() + ) + logger.error( + f'Invalid session: session does not belong to user [Request ID: {request_id}]' + ) + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Invalid session' + ), + ) + except json.JSONDecodeError: + request_id = getattr( + request.state, 'request_id', get_current_request_id() + ) + logger.error( + f'Failed to decode cached session data, fetching from DB [Request ID: {request_id}]' + ) + cached_session = None + + if not cached_session: + # If not in cache, fetch from DB + session = await session_repository.find_one( + id=decoded['session_id'] + ) + if not session: + request_id = getattr( + request.state, 'request_id', get_current_request_id() + ) + logger.error( + f'Invalid session: session not found in database [Request ID: {request_id}]' + ) + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Invalid session' + ), + ) + + # Cache the session data + session_data = { + 'id': str(session.id), + 'user_id': str(session.user_id), + 'device_info': session.device_info, + } + + cache_manager.add( + session_cache_key, + json.dumps(session_data), + token_service.token_expiry, + ) + + if str(session.user_id) != decoded['user_id']: + request_id = getattr( + request.state, 'request_id', get_current_request_id() + ) + logger.error( + f'Invalid session: session does not belong to user [Request ID: {request_id}]' + ) + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Invalid session' + ), + ) + + session_obj = UserSession( + role_id=decoded['role_id'], + user_id=decoded['user_id'], + session_id=decoded['session_id'], + ) + request.state.session = session_obj + + # 
Check for admin-only APIs + for admin_api_prefix in admin_apis: + if request.url.path.startswith(admin_api_prefix): + is_admin = await check_is_admin(session_obj.role_id) + if not is_admin: + logger.warning( + f'Non-admin user {session_obj.user_id} attempted to access admin API: {request.url.path}' + ) + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Admin access required' + ), + ) + break + + response = await call_next(request) + return response + + except jwt.ExpiredSignatureError as exc: + request_id = getattr(request.state, 'request_id', get_current_request_id()) + logger.error( + f'ExpiredSignatureError in require_auth middleware: {exc} [Request ID: {request_id}]' + ) + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + error='Token has expired. Please log in again.' + ), + ) diff --git a/wavefront/server/modules/user_management_module/user_management_module/constants/auth.py b/wavefront/server/modules/user_management_module/user_management_module/constants/auth.py new file mode 100644 index 00000000..27c40f5c --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/constants/auth.py @@ -0,0 +1,11 @@ +"""Authentication constants.""" + + +class RootfloHeaders: + CLIENT_KEY = 'X-Rootflo-Key' + SIGNATURE = 'X-Rootflo-Signature' + TIMESTAMP = 'X-Rootflo-Timestamp' + PASSTHROUGH = 'X-Passthrough' + + +SERVICE_AUTH_ROLE_ID = 'floconsole-service' diff --git a/wavefront/server/modules/user_management_module/user_management_module/controllers/access_controller.py b/wavefront/server/modules/user_management_module/user_management_module/controllers/access_controller.py new file mode 100644 index 00000000..f223dd9a --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/controllers/access_controller.py @@ -0,0 +1,393 @@ +from typing import Optional +import uuid + +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from db_repo_module.models.resource import Resource +from db_repo_module.models.resource import ResourceScope +from db_repo_module.models.role import Role +from db_repo_module.models.role_resource import RoleResource +from db_repo_module.models.user_role import UserRole +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import APIRouter +from fastapi import Query +from fastapi import Request +from fastapi import status +from fastapi.params import Depends +from fastapi.responses import JSONResponse +from sqlalchemy import func +from sqlalchemy import Result +from sqlalchemy import select +from sqlalchemy.orm import selectinload +from user_management_module.models.resource import CreateRolePayload +from user_management_module.models.resource import ResourcePayload +from user_management_module.models.resource import UpdateResourcePayload +from user_management_module.user_container import UserContainer +from user_management_module.services.user_service import UserService +from user_management_module.utils.user_utils import check_is_admin +from user_management_module.utils.user_utils import get_current_user + +access_router = APIRouter(prefix='/v1/access') + + +@access_router.post('/resources') +@inject +async def create_resource( + request: Request, + payload: ResourcePayload, + 
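+    # Dependencies below are injected by dependency-injector's wiring:
+    # Depends(Provide[Container.provider]) resolves the container-managed
+    # instance per request, so tests can override a repository on the
+    # container without touching this route.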
response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + resource_repository: SQLAlchemyRepository[Resource] = Depends( + Provide[UserContainer.resource_repository] + ), + role_repository: SQLAlchemyRepository[Role] = Depends( + Provide[UserContainer.role_repository] + ), + role_resource_repository: SQLAlchemyRepository[RoleResource] = Depends( + Provide[UserContainer.role_resource_repository] + ), + user_role_repository: SQLAlchemyRepository[UserRole] = Depends( + Provide[UserContainer.user_role_repository] + ), +): + user_role_id, _, _ = get_current_user(request) + is_admin = await check_is_admin(user_role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse('Access denied'), + ) + + resources: list[Resource] = [] + roles: list[Role] = [] + role_resources: list[RoleResource] = [] + + for res in payload.resources: + # Create role for each resource + role_id = uuid.uuid4() + resource_id = uuid.uuid4() + + resource = Resource( + id=resource_id, + key=res.key, + value=res.value, + description=res.description, + scope=res.scope, + meta=res.meta, + ) + + role = Role( + id=role_id, + name=f'{res.key} - {res.value}', + description=f'Resource role for {res.value}', + ) + + resources.append(resource) + roles.append(role) + + # Create role-resource mapping + role_resources.append(RoleResource(role_id=role_id, resource_id=resource_id)) + + async with resource_repository.session() as session: + async with session.begin(): + await resource_repository.create_all( + resources, replace=True, session=session + ) + await role_repository.create_all(roles, replace=True, session=session) + await role_resource_repository.create_all( + role_resources, replace=True, session=session + ) + admin_users = await user_role_repository.find( + role_id=user_role_id, session=session + ) + + permissions: list[UserRole] = [] + if admin_users and len(admin_users) > 0: + for user in admin_users: + for role in roles: + permissions.append( + UserRole(user_id=user.user_id, role_id=role.id) + ) + + await user_role_repository.create_all( + permissions, replace=True, session=session + ) + + await session.commit() + + resource_count = len(payload.resources) + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + data={'message': f'Created {resource_count} resources successfully'} + ), + ) + + +@access_router.post('/roles') +@inject +async def create_role( + request: Request, + payload: CreateRolePayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + resource_repository: SQLAlchemyRepository[Resource] = Depends( + Provide[UserContainer.resource_repository] + ), + role_repository: SQLAlchemyRepository[Role] = Depends( + Provide[UserContainer.role_repository] + ), + role_resource_repository: SQLAlchemyRepository[RoleResource] = Depends( + Provide[UserContainer.role_resource_repository] + ), +): + role_id, _, _ = get_current_user(request) + is_admin = await check_is_admin(role_id) + + if not is_admin: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse('Access denied'), + ) + + resources = await resource_repository.find(id=payload.resources) + + unknown_resource_count = len(payload.resources) - len(resources) + if len(payload.resources) != len(resources): + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, +
content=response_formatter.buildErrorResponse( + f'Found {unknown_resource_count} unknown resource(s) in the payload. Remove these resources from the payload or create these resources and then proceed' + ), + ) + + role_id = None + # Check if a role already exists for the given resources + async with role_resource_repository.session() as session: + stmt = ( + select(RoleResource.role_id) + .where(RoleResource.resource_id.in_(payload.resources)) + .group_by(RoleResource.role_id) + .having( + func.count(func.distinct(RoleResource.resource_id)) + == len(payload.resources) + ) + ) + result: Result = await session.execute(stmt) + role_id = result.scalar() + + if role_id: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildSuccessResponse( + data={ + 'message': 'Role already exists for the given resources', + 'role_id': str(role_id), + } + ), + ) + else: + role = { + 'name': payload.name, + 'description': payload.description, + } + role: Role = await role_repository.create(**role) + role_resources = [ + RoleResource(resource_id=resource, role_id=role.id) + for resource in payload.resources + ] + await role_resource_repository.create_all(role_resources) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + data={ + 'message': 'Created role successfully', + 'role_id': str(role.id), + } + ), + ) + + +@access_router.get('/resources') +@inject +async def get_resource( + request: Request, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + user_service: UserService = Depends(Provide[UserContainer.user_service]), + scopes: list[str] = Query( + default=[ResourceScope.DASHBOARD, ResourceScope.CONSOLE], + description='The scopes of the resources to fetch', + ), +): + _, user_id, _ = get_current_user(request) + + resources = await user_service.get_user_resources(user_id=user_id, scopes=scopes) + + data = [res.to_dict() for res in resources] + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse(data={'resources': data}), + ) + + +@access_router.get('/roles') +@inject +async def get_role( + request: Request, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + role_repository: SQLAlchemyRepository[Role] = Depends( + Provide[UserContainer.role_repository] + ), + scopes: list[str] = Query( + default=[ResourceScope.CONSOLE], description='The scopes of the roles to fetch' + ), + select_item: Optional[str] = None, +): + role_id, _, _ = get_current_user(request) + is_admin = await check_is_admin(role_id) + + if not is_admin: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse('Access denied'), + ) + item_to_select = select_item.split(',') if select_item else [] + valid_columns = [] + for item in item_to_select: + # hasattr avoids an unhandled AttributeError (and a 500) on unknown columns + if not hasattr(Role, item): + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + error=f'Invalid column {item}' + ), + ) + valid_columns.append(getattr(Role, item)) + + async with role_repository.session() as session: + if valid_columns: + statement = select(Role).options(selectinload(Role.resources)) + result = await session.execute(statement) + roles = result.scalars().unique().all() + data = [] + for role in roles: + role_dict = {} + for col in item_to_select: + if col == 'resources': + role_dict[col] = [ +
resource.to_dict() for resource in role.resources + ] + else: + role_dict[col] = str(getattr(role, col)) + data.append(role_dict) + else: + statement = ( + select(Role) + .join(RoleResource, Role.id == RoleResource.role_id) + .join(Resource, Resource.id == RoleResource.resource_id) + .where(Resource.scope.in_(scopes)) + ) + result: Result = await session.execute(statement) + roles = result.scalars().all() + + data = [role.to_dict() for role in roles] + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse(data={'roles': data}), + ) + + +@access_router.patch('/resources/{resource_id}') +@inject +async def patch_resources( + request: Request, + resource_id: str, + payload: UpdateResourcePayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter], + ), + resource_repository: SQLAlchemyRepository[Resource] = Depends( + Provide[UserContainer.resource_repository] + ), +): + user_role_id, _, _ = get_current_user(request) + is_admin = await check_is_admin(user_role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse('Access denied'), + ) + + # Explicitly extract fields that can be updated + update_fields = {} + if payload.key is not None: + update_fields['key'] = payload.key + if payload.value is not None: + update_fields['value'] = payload.value + if payload.description is not None: + update_fields['description'] = payload.description + if payload.scope is not None: + update_fields['scope'] = payload.scope + if payload.meta is not None: + update_fields['meta'] = payload.meta + + if not update_fields: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'No fields provided for update' + ), + ) + await resource_repository.find_one_and_update({'id': resource_id}, **update_fields) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + data={'message': 'Resource updated successfully'} + ), + ) + + +@access_router.delete('/resources/{resource_id}') +@inject +async def delete_resources( + request: Request, + resource_id: str, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter], + ), + resource_repository: SQLAlchemyRepository[Resource] = Depends( + Provide[UserContainer.resource_repository] + ), +): + user_role_id, _, _ = get_current_user(request) + is_admin = await check_is_admin(user_role_id) + if not is_admin: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse('Access denied'), + ) + delete_resource = await resource_repository.find(id=resource_id) + if not delete_resource: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Resource not found with the given ID.' 
+ ), + ) + await resource_repository.delete_all(id=resource_id) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + data={'message': 'Resource deleted successfully'} + ), + ) diff --git a/wavefront/server/modules/user_management_module/user_management_module/controllers/auth_controller.py b/wavefront/server/modules/user_management_module/user_management_module/controllers/auth_controller.py new file mode 100644 index 00000000..b1fd69ad --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/controllers/auth_controller.py @@ -0,0 +1,293 @@ +import json +from uuid import uuid4 + +from auth_module.auth_container import AuthContainer +from auth_module.services.token_service import TokenService +from authlib.integrations.starlette_client import OAuth +from common_module.common_container import CommonContainer +from common_module.feature.feature_flag import ( + INACTIVE_ACCOUNT_DISABLE_FLAG, + is_feature_enabled, +) +from common_module.response_formatter import ResponseFormatter +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.models.resource import ResourceScope +from db_repo_module.models.session import Session +from db_repo_module.models.user import User +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import APIRouter +from fastapi import Depends +from fastapi import Request +from fastapi import status +from fastapi.responses import JSONResponse +from pydantic import BaseModel +from user_management_module.models.oauth_provider import OAuthProviderConfig +from user_management_module.services.account_lockout_service import ( + AccountLockoutService, +) +from user_management_module.services.account_inactivity_service import ( + AccountInactivityService, +) +from user_management_module.user_container import UserContainer +from user_management_module.services.user_service import UserService +from user_management_module.utils.password_utils import verify_password +from user_management_module.utils.user_utils import create_account_lockout_response +from user_management_module.utils.user_utils import get_session_cache_key + +auth_router = APIRouter(prefix='/v1') +oauth = OAuth() + + +class AuthRequest(BaseModel): + email: str + password: str + + +@auth_router.get('/health') +def health_check(): + return {'status': 'ok'} + + +@auth_router.post('/authenticate') +@inject +async def authenticate( + request: Request, + auth_data: AuthRequest, + token_service: TokenService = Depends(Provide[AuthContainer.token_service]), + user_service: UserService = Depends(Provide[UserContainer.user_service]), + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[UserContainer.user_repository] + ), + session_repository: SQLAlchemyRepository[Session] = Depends( + Provide[UserContainer.session_repository] + ), + cache_manager: CacheManager = Depends(Provide[CommonContainer.cache_manager]), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + account_lockout_service: AccountLockoutService = Depends( + Provide[UserContainer.account_lockout_service] + ), + account_inactivity_service: AccountInactivityService = Depends( + Provide[UserContainer.account_inactivity_service] + ), +): + # Check if account is locked before attempting authentication + is_locked, locked_until = await 
account_lockout_service.check_account_lockout( + auth_data.email + ) + if is_locked: + return create_account_lockout_response( + locked_until, account_lockout_service, response_formatter + ) + + user = await user_repository.find_one(email=auth_data.email) + + # Check for account inactivity if feature is enabled and user exists + if user and is_feature_enabled(INACTIVE_ACCOUNT_DISABLE_FLAG): + ( + is_inactive, + days_since_login, + ) = await account_inactivity_service.check_account_inactivity(user) + if is_inactive: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + f'Account has been disabled due to inactivity. Last login was {days_since_login} days ago.' + ), + ) + + if user is None or not verify_password(auth_data.password, user.password): + # Handle failed login attempt + if user: # Only track attempts for existing users + ( + is_now_locked, + locked_until, + ) = await account_lockout_service.handle_failed_login(user) + if is_now_locked: + return create_account_lockout_response( + locked_until, account_lockout_service, response_formatter + ) + + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'Incorrect username or password' + ), + ) + if user.deleted: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse('User account is disabled'), + ) + + # Reset failed attempts on successful login + await account_lockout_service.reset_failed_attempts(user) + + # Update last login timestamp + await account_inactivity_service.update_last_login(user) + + # Get device info from headers + device_info = request.headers.get('User-Agent') + + existing_sessions = await session_repository.find(user_id=user.id, limit=1000) + for s in existing_sessions: + cache_manager.remove(get_session_cache_key(s.id)) + await session_repository.delete_all(user_id=user.id) + + # Create new session + session = await session_repository.create( + user_id=user.id, device_info=device_info, id=uuid4() + ) + + # Cache session data + session_cache_key = get_session_cache_key(session.id) + session_data = { + 'id': str(session.id), + 'user_id': str(session.user_id), + 'device_info': session.device_info, + } + cache_manager.add( + session_cache_key, + json.dumps(session_data), + token_service.token_expiry, + ) + + role_id = await user_service.get_user_role_for_scope( + user_id=str(user.id), scope=ResourceScope.CONSOLE + ) + + if not role_id: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'User has no access to the console' + ), + ) + + # Include session_id in token payload + token = token_service.create_token( + sub=user.email, + user_id=str(user.id), + role_id=role_id, + payload={'session_id': str(session.id)}, + expiry=token_service.token_expiry, + ) + + response_data = {'access_token': token, 'token_type': 'bearer'} + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'user': response_data}), + ) + + +@auth_router.post('/authenticate/config') +@inject +def config_oauth( + config: OAuthProviderConfig, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + oauth.register( + name=config.name, + client_id=config.client_id, + client_secret=config.client_secret, + redirect_uri=config.redirect_uri, + client_kwargs=config.client_kwargs, + server_metadata_url=config.server_metadata_url, 
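+        # Authlib keeps registered providers on the shared `oauth` registry;
+        # a provider registered here with name='google' is what backs the
+        # `oauth.google.authorize_redirect(...)` call in /google/login below.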
+ ) + response_data = f'{config.name} OAuth provider registered successfully.' + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'message': response_data}), + ) + + +@auth_router.get('/google/login') +async def google_login(request: Request): + redirect_uri = str(request.url_for('google_callback')) + return await oauth.google.authorize_redirect(request, redirect_uri) + + +@auth_router.get('/google/login/callback') +@inject +async def google_callback( + request: Request, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + token = await oauth.google.authorize_access_token(request) + user = token['userinfo'] + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'user': dict(user)}), + ) + + +# @auth_router.get('/azure/login') +# @inject +# async def azure_login( +# request: Request, +# azure_connector: AzureConnector = Depends(Provide[AuthContainer.azure_connector]), +# ): +# try: +# url = azure_connector.get_authorization_url() +# return RedirectResponse(url=url) +# except Exception as e: +# raise HTTPException(status_code=500, detail=str(e)) + + +# @auth_router.get('/oauth/azure/callback') +# @inject +# async def azure_callback( +# request: Request, +# code: str, +# azure_connector: AzureConnector = Depends(Provide[AuthContainer.azure_connector]), +# response_formatter: ResponseFormatter = Depends( +# Provide[CommonContainer.response_formatter] +# ), +# ): +# try: +# token = azure_connector.get_credentials_after_auth(code) +# creds = azure_connector.get_credentials(token) +# user = await azure_connector.get_user(creds) +# return JSONResponse( +# status_code=status.HTTP_200_OK, +# content=response_formatter.buildSuccessResponse({'user': user.mail}), +# ) +# except Exception as e: +# raise HTTPException(status_code=500, detail=str(e)) + + +@auth_router.post('/logout') +@inject +async def logout( + request: Request, + session_repository: SQLAlchemyRepository[Session] = Depends( + Provide[UserContainer.session_repository] + ), + cache_manager: CacheManager = Depends(Provide[UserContainer.cache_manager]), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), +): + # Get the current session from request state + current_session = request.state.session + + # Delete the session from database + await session_repository.delete_all(id=current_session.session_id) + + # Clear both user and session cache + cache_manager.remove(current_session.user_id) + cache_manager.remove(get_session_cache_key(current_session.session_id)) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'message': 'Successfully logged out'} + ), + ) diff --git a/wavefront/server/modules/user_management_module/user_management_module/controllers/auth_plugin_controller.py b/wavefront/server/modules/user_management_module/user_management_module/controllers/auth_plugin_controller.py new file mode 100644 index 00000000..f634ff00 --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/controllers/auth_plugin_controller.py @@ -0,0 +1,622 @@ +import json +from uuid import uuid4 +from db_repo_module.models.resource import ResourceScope +from dependency_injector.wiring import inject, Provide +from fastapi import Depends, Request, status, APIRouter, Query +from fastapi.responses import JSONResponse, RedirectResponse +from urllib.parse 
import urlencode +from pydantic import BaseModel +from typing import Dict, Any, Optional +from uuid import UUID + + +from auth_module.auth_container import AuthContainer +from auth_module.services.token_service import TokenService +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.models.authenticator import Authenticator +from db_repo_module.models.session import Session +from db_repo_module.models.user import User +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from plugins_module.plugins_container import PluginsContainer +from plugins_module.services.authenticator_services import ( + get_authenticator_instance, + get_authenticator_config, + get_authenticator_with_config, +) +from user_management_module.user_container import UserContainer +from user_management_module.services.user_service import UserService +from user_management_module.utils.password_utils import verify_password +from user_management_module.utils.user_utils import get_session_cache_key + +from authenticator import AuthenticatorType +from authenticator.helper import validate_email + + +auth_plugin_router = APIRouter() + + +class UnifiedAuthRequest(BaseModel): + auth_id: str + credentials: Dict[str, Any] + + +class OAuthInitRequest(BaseModel): + auth_id: str + + +@auth_plugin_router.post('/v1/plugin-auth/authenticate') +@inject +async def unified_authenticate( + request: Request, + auth_request: UnifiedAuthRequest, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + authenticator_repository: SQLAlchemyRepository[Authenticator] = Depends( + Provide[PluginsContainer.authenticator_repository] + ), + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[UserContainer.user_repository] + ), + user_service: UserService = Depends(Provide[UserContainer.user_service]), + session_repository: SQLAlchemyRepository[Session] = Depends( + Provide[UserContainer.session_repository] + ), + cache_manager: CacheManager = Depends(Provide[CommonContainer.cache_manager]), + token_service: TokenService = Depends(Provide[AuthContainer.token_service]), +): + """Unified authentication endpoint that routes to appropriate authenticator.""" + + try: + # Get authenticator instance and config by ID + auth_id = UUID(auth_request.auth_id) + authenticator, config_data = await get_authenticator_with_config( + auth_id, authenticator_repository + ) + + # Handle not found case (both None) + if config_data is None: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Authenticator {auth_request.auth_id} not found' + ), + ) + + # Handle disabled case (config exists but instance is None) + if authenticator is None: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + f'Authenticator {auth_request.auth_id} is not enabled' + ), + ) + + # Handle email/password authentication separately for existing user validation + if config_data['auth_type'] == AuthenticatorType.EMAIL_PASSWORD.value: + return await _handle_email_password_auth( + auth_request.credentials, + request, + response_formatter, + user_service, + user_repository, + session_repository, + cache_manager, + token_service, + ) + + # Handle OAuth authentication (Google: {authentication_code, state}) + auth_result = 
authenticator.authenticate(auth_request.credentials) + + if not auth_result.success: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + auth_result.error or 'Authentication failed' + ), + ) + + # Create session from auth result + user = await user_repository.find_one(email=auth_result.user_info.email) + if user is None: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + "User with email doesn't exist" + ), + ) + if user.deleted: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'User account is disabled' + ), + ) + + # Get device info from headers + device_info = request.headers.get('User-Agent') + + # Create new session + session = await session_repository.create( + user_id=user.id, device_info=device_info, id=uuid4() + ) + + # Cache session data + session_cache_key = get_session_cache_key(session.id) + session_data = { + 'id': str(session.id), + 'user_id': str(session.user_id), + 'device_info': session.device_info, + } + cache_manager.add( + session_cache_key, + json.dumps(session_data), + token_service.token_expiry, + ) + + role_id = await user_service.get_user_role_for_scope( + user_id=str(user.id), scope=ResourceScope.CONSOLE + ) + + if not role_id: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'User has no access to the console' + ), + ) + + # Include session_id in token payload + token = token_service.create_token( + sub=user.email, + user_id=str(user.id), + role_id=role_id, + payload={'session_id': str(session.id)}, + expiry=token_service.token_expiry, + ) + + response_data = {'access_token': token, 'token_type': 'bearer'} + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'user': response_data}), + ) + + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Authentication failed: {str(e)}' + ), + ) + + +# For google and microsoft oauth +@auth_plugin_router.post('/v1/plugin-auth/oauth/init') +@inject +async def init_oauth_flow( + request: Request, + oauth_request: OAuthInitRequest, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + authenticator_repository: SQLAlchemyRepository[Authenticator] = Depends( + Provide[PluginsContainer.authenticator_repository] + ), +): + """Initialize OAuth flow and return authorization URL.""" + + try: + # Get authenticator instance by ID + auth_id = UUID(oauth_request.auth_id) + authenticator = await get_authenticator_instance( + auth_id, authenticator_repository + ) + + if not authenticator: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Authenticator {oauth_request.auth_id} not configured' + ), + ) + + # Generate state and get authorization URL + state = json.dumps({'auth_id': oauth_request.auth_id}) + auth_url = authenticator.get_authorization_url(state) + + if not auth_url: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Authenticator {oauth_request.auth_id} does not support OAuth' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'authorization_url': auth_url, 
'state': state} + ), + ) + + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Failed to initialize OAuth flow: {str(e)}' + ), + ) + + +@auth_plugin_router.get('/v1/oauth/google/callback') +@inject +async def google_oauth_callback( + request: Request, + code: str = Query(...), + state: str = Query(...), + error: Optional[str] = Query(None), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + authenticator_repository: SQLAlchemyRepository[Authenticator] = Depends( + Provide[PluginsContainer.authenticator_repository] + ), + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[UserContainer.user_repository] + ), + user_service: UserService = Depends(Provide[UserContainer.user_service]), + session_repository: SQLAlchemyRepository[Session] = Depends( + Provide[UserContainer.session_repository] + ), + cache_manager: CacheManager = Depends(Provide[UserContainer.cache_manager]), + token_service: TokenService = Depends(Provide[AuthContainer.token_service]), +): + """Handle Google OAuth callback.""" + state_obj = json.loads(state) + auth_id = state_obj['auth_id'] + + return await _handle_oauth_callback( + auth_id, + {'authorization_code': code, 'state': state, 'error': error}, + request, + response_formatter, + authenticator_repository, + user_service, + user_repository, + session_repository, + cache_manager, + token_service, + ) + + +@auth_plugin_router.get('/v1/oauth/microsoft/callback') +@inject +async def microsoft_oauth_callback( + request: Request, + code: Optional[str] = Query( + None + ), # truly optional: providers don't send a code in error scenarios + state: str = Query(...), + error: Optional[str] = Query(None), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + authenticator_repository: SQLAlchemyRepository[Authenticator] = Depends( + Provide[PluginsContainer.authenticator_repository] + ), + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[UserContainer.user_repository] + ), + user_service: UserService = Depends(Provide[UserContainer.user_service]), + session_repository: SQLAlchemyRepository[Session] = Depends( + Provide[UserContainer.session_repository] + ), + cache_manager: CacheManager = Depends(Provide[UserContainer.cache_manager]), + token_service: TokenService = Depends(Provide[AuthContainer.token_service]), +): + """Handle Microsoft OAuth callback.""" + state_obj = json.loads(state) + auth_id = state_obj['auth_id'] + + return await _handle_oauth_callback( + auth_id, + {'authorization_code': code, 'state': state, 'error': error}, + request, + response_formatter, + authenticator_repository, + user_service, + user_repository, + session_repository, + cache_manager, + token_service, + ) + + +async def _handle_oauth_callback( + auth_id: str, + callback_data: Dict[str, Any], + request: Request, + response_formatter: ResponseFormatter, + authenticator_repository: SQLAlchemyRepository[Authenticator], + user_service: UserService, + user_repository: SQLAlchemyRepository[User], + session_repository: SQLAlchemyRepository[Session], + cache_manager: CacheManager, + token_service: TokenService, +) -> RedirectResponse: + """Common OAuth callback handler.""" + + try: + # Get authenticator instance and config + auth_uuid = UUID(auth_id) + authenticator, config_data = await get_authenticator_with_config( + auth_uuid, authenticator_repository + ) + + # Helper to get failure 
URL from config + def get_failure_redirect(error_msg: str) -> RedirectResponse: + if config_data: + failure_url = config_data.get('config', {}).get( + 'client_redirect_failure_url' + ) + if failure_url: + provider = config_data.get('auth_type') + params = urlencode({'provider': provider, 'error': error_msg}) + return RedirectResponse(url=f'{failure_url}?{params}') + return RedirectResponse(url='about:blank') + + # Handle not found case + if config_data is None: + return get_failure_redirect(f'Authenticator {auth_id} not found') + + # Handle disabled case + if authenticator is None: + return get_failure_redirect(f'Authenticator {auth_id} is not enabled') + + # Extract redirect URLs + provider = config_data.get('auth_type') + success_url = config_data.get('config', {}).get('client_redirect_success_url') + failure_url = config_data.get('config', {}).get('client_redirect_failure_url') + + # Handle OAuth error from provider + if callback_data.get('error'): + if failure_url: + params = urlencode( + { + 'provider': provider, + 'error': f'OAuth error: {callback_data["error"]}', + } + ) + return RedirectResponse(url=f'{failure_url}?{params}') + return RedirectResponse(url='about:blank') + + # Handle OAuth callback + auth_result = authenticator.handle_callback(callback_data) + + if not auth_result.success: + if failure_url: + params = urlencode( + { + 'provider': provider, + 'error': auth_result.error or 'OAuth authentication failed', + } + ) + return RedirectResponse(url=f'{failure_url}?{params}') + return RedirectResponse(url='about:blank') + + # Create session from auth result + user = await user_repository.find_one(email=auth_result.user_info.email) + if user is None: + if failure_url: + params = urlencode( + {'provider': provider, 'error': "User with email doesn't exist"} + ) + return RedirectResponse(url=f'{failure_url}?{params}') + return RedirectResponse(url='about:blank') + + if user.deleted: + if failure_url: + params = urlencode( + {'provider': provider, 'error': 'User account is disabled'} + ) + return RedirectResponse(url=f'{failure_url}?{params}') + return RedirectResponse(url='about:blank') + + # Get device info from headers + device_info = request.headers.get('User-Agent') + + # Create new session + session = await session_repository.create( + user_id=user.id, device_info=device_info, id=uuid4() + ) + + # Cache session data + session_cache_key = get_session_cache_key(session.id) + session_data = { + 'id': str(session.id), + 'user_id': str(session.user_id), + 'device_info': session.device_info, + } + cache_manager.add( + session_cache_key, + json.dumps(session_data), + token_service.token_expiry, + ) + + role_id = await user_service.get_user_role_for_scope( + user_id=str(user.id), scope=ResourceScope.CONSOLE + ) + + if not role_id: + if failure_url: + params = urlencode( + {'provider': provider, 'error': 'User has no access to the console'} + ) + return RedirectResponse(url=f'{failure_url}?{params}') + return RedirectResponse(url='about:blank') + + # Include session_id in token payload + token = token_service.create_token( + sub=user.email, + user_id=str(user.id), + role_id=role_id, + payload={'session_id': str(session.id)}, + expiry=token_service.token_expiry, + ) + + # Success: redirect to success URL with access token + if success_url: + params = urlencode({'provider': provider, 'access_token': token}) + return RedirectResponse(url=f'{success_url}?{params}') + + return RedirectResponse(url='about:blank') + + except Exception as e: + # Try to get config for failure URL + try: + 
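# Best-effort fallback: re-read the authenticator config so the user
+            # can still be redirected to the configured failure URL with an
+            # error message; if this lookup also fails, fall through to the
+            # opaque 'about:blank' redirect below.
+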
auth_uuid = UUID(auth_id) + config_data = await get_authenticator_config( + auth_uuid, authenticator_repository + ) + if config_data: + failure_url = config_data.get('config', {}).get( + 'client_redirect_failure_url' + ) + if failure_url: + provider = config_data.get('auth_type') + params = urlencode( + { + 'provider': provider, + 'error': f'OAuth callback failed: {str(e)}', + } + ) + return RedirectResponse(url=f'{failure_url}?{params}') + except Exception: + # Swallow lookup errors without rebinding `e`; the outer + # exception is the one being reported. + pass + + return RedirectResponse(url='about:blank') + + +async def _handle_email_password_auth( + credentials: Dict[str, Any], + request: Request, + response_formatter: ResponseFormatter, + user_service: UserService, + user_repository: SQLAlchemyRepository[User], + session_repository: SQLAlchemyRepository[Session], + cache_manager: CacheManager, + token_service: TokenService, +) -> JSONResponse: + """Handle email/password authentication with existing user validation.""" + + email = credentials.get('email') + password = credentials.get('password') + + if not email or not password: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Email and password are required' + ), + ) + + if not validate_email(email): + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse('Invalid email format'), + ) + + try: + # Find user in database + user = await user_repository.find_one(email=email) + + # Check if user exists first (before accessing any attributes) + if not user: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + 'Incorrect email or password' + ), + ) + + # Check if user account is disabled + if user.deleted: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'User account is disabled' + ), + ) + + # Verify password (user is guaranteed to exist here) + if not verify_password(password, user.password): + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + 'Incorrect email or password' + ), + ) + + # Get device info + device_info = request.headers.get('User-Agent') + + # Create new session + session = await session_repository.create( + user_id=user.id, device_info=device_info, id=uuid4() + ) + + # Cache session data + session_cache_key = get_session_cache_key(session.id) + session_data = { + 'id': str(session.id), + 'user_id': str(session.user_id), + 'device_info': session.device_info, + } + cache_manager.add( + session_cache_key, + json.dumps(session_data), + token_service.token_expiry, + ) + + role_id = await user_service.get_user_role_for_scope( + user_id=str(user.id), scope=ResourceScope.CONSOLE + ) + + if not role_id: + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'User has no access to the console' + ), + ) + + # Create JWT token with session information + token = token_service.create_token( + sub=user.email, + user_id=str(user.id), + role_id=role_id, + payload={'session_id': str(session.id), 'auth_provider': 'email_password'}, + expiry=token_service.token_expiry, + ) + + response_data = { + 'access_token': token, + 'token_type': 'bearer', + 'session_id': str(session.id), + 'expires_in': token_service.token_expiry, + } + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'user': response_data}), +
) + + except Exception as e: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Authentication failed: {str(e)}' + ), + ) diff --git a/wavefront/server/modules/user_management_module/user_management_module/controllers/user_controller.py b/wavefront/server/modules/user_management_module/user_management_module/controllers/user_controller.py new file mode 100644 index 00000000..970354ee --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/controllers/user_controller.py @@ -0,0 +1,602 @@ +import secrets +from typing import Optional + +from auth_module.auth_container import AuthContainer +from auth_module.services.token_service import TokenService +from common_module.common_cache import CommonCache +from common_module.common_container import CommonContainer +from common_module.log.logger import logger +from common_module.response_formatter import ResponseFormatter +from db_repo_module.models.resource import Resource +from db_repo_module.models.resource import ResourceScope +from db_repo_module.models.role import Role +from db_repo_module.models.role_resource import RoleResource +from db_repo_module.models.user import User +from db_repo_module.models.user_role import UserRole +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from db_repo_module.cache.cache_manager import CacheManager +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import Path, Query +from fastapi import Request +from fastapi import status +from fastapi.params import Depends +from fastapi.responses import JSONResponse +from fastapi.routing import APIRouter +from fastapi.security import OAuth2PasswordBearer +import jwt +from sqlalchemy import and_ +from sqlalchemy import delete +from sqlalchemy import select +from sqlalchemy import or_ +from sqlalchemy import func + +from user_management_module.models.user_schema import NewUser +from user_management_module.models.user_schema import ResetUser +from user_management_module.models.user_schema import UpdateUser +from user_management_module.services.email_service import EmailService +from user_management_module.services.account_lockout_service import ( + AccountLockoutService, +) +from user_management_module.user_container import UserContainer +from user_management_module.utils.password_utils import hash_password +from user_management_module.utils.user_utils import ( + check_is_admin, + create_account_lockout_response, +) +from user_management_module.utils.user_utils import get_current_user +from user_management_module.services.user_service import UserService +import json +from typing import List +from common_module.utils.serializer import serialize_values + +user_router = APIRouter(prefix='/v1') + +oauth2_scheme = OAuth2PasswordBearer(tokenUrl='token') + + +@user_router.post('/users') +@inject +async def create_user( + new_user: NewUser, + request: Request, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[UserContainer.user_repository] + ), + role_repository: SQLAlchemyRepository[Role] = Depends( + Provide[UserContainer.role_repository] + ), + user_service: UserService = Depends(Provide[UserContainer.user_service]), +): + role_id, _, _ = get_current_user(request) + is_admin = await check_is_admin(role_id) + + if not is_admin: + return JSONResponse( + 
status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse('Access denied'), + ) + + existing_user = await user_repository.find_one(email=new_user.email) + if existing_user: + if existing_user.deleted: + return await user_service.reactivate_user( + existing_user, new_user, role_id, response_formatter + ) + else: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'User with the same email already exists' + ), + ) + + async with user_repository.session() as session: + try: + get_console_resources_query = ( + select(Resource) + .join(RoleResource, Resource.id == RoleResource.resource_id) + .where( + and_( + RoleResource.role_id.in_(new_user.role_id), + Resource.scope == ResourceScope.CONSOLE, + ) + ) + ) + result = await session.execute(get_console_resources_query) + console_resources = result.scalars().all() + if len(console_resources) == 0: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'At least one console resource is mandatory' + ), + ) + + hashed_password = hash_password(new_user.password) + user = User( + email=new_user.email, + password=hashed_password, + first_name=new_user.first_name, + last_name=new_user.last_name, + ) + + # Check for valid roles + query = select(Role).where(Role.id.in_(new_user.role_id)) + result = await session.execute(query) + existing_roles = result.scalars().all() + existing_role_ids = {str(role.id) for role in existing_roles} + + invalid_roles = set(new_user.role_id) - existing_role_ids + if invalid_roles: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Invalid role IDs: {", ".join(invalid_roles)}' + ), + ) + + # Create user + session.add(user) + await session.flush() + user_id = user.id + + if role_id in new_user.role_id: # Is creating admin user + all_roles = await role_repository.find() + user_roles = [ + UserRole(user_id=user_id, role_id=role.id) for role in all_roles + ] + else: # Is creating user with role other than admin + user_roles = [ + UserRole(user_id=user_id, role_id=role_id) + for role_id in new_user.role_id + ] + session.add_all(user_roles) + + await session.commit() + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Created user successfully', + 'user_id': str(user_id), + } + ), + ) + + except Exception as e: + await session.rollback() + logger.error(f'Error while creating user: {e}') + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse('Failed to create user'), + ) + + +@user_router.patch('/users') +@inject +async def update_user( + update_user: UpdateUser, + request: Request, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + user_role_repository: SQLAlchemyRepository[UserRole] = Depends( + Provide[UserContainer.user_role_repository] + ), + cache_manager: CacheManager = Depends(Provide[UserContainer.cache_manager]), +): + role_id, _, _ = get_current_user(request) + is_admin = await check_is_admin(role_id) + + if not is_admin: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse('Access denied'), + ) + + async with user_role_repository.session() as session: + # Check for valid roles + query = select(Role).where(Role.id.in_(update_user.add_role_ids)) +
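# Validate the requested role IDs up front so unknown IDs produce a
+        # clear 400 response instead of surfacing later as a database
+        # integrity error when the UserRole rows are flushed.
+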
result = await session.execute(query) + existing_roles = result.scalars().all() + existing_role_ids = {str(role.id) for role in existing_roles} + + invalid_roles = set(update_user.add_role_ids or []) - existing_role_ids + if invalid_roles: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Invalid role IDs: {", ".join(invalid_roles)}' + ), + ) + + admins = await user_role_repository.find(role_id=role_id) + if len(admins) == 1 and str(update_user.user_id) == str(admins[0].user_id): + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + error='At least one admin is mandatory, please assign another user as admin before updating this user.' + ), + ) + + user_roles = [ + UserRole(user_id=update_user.user_id, role_id=id) + for id in update_user.add_role_ids or [] + ] + session.add_all(user_roles) + + if ( + update_user.delete_role_ids is not None + and len(update_user.delete_role_ids) > 0 + ): + query = delete(UserRole.__table__).where( + and_( + UserRole.user_id == update_user.user_id, + UserRole.role_id.in_(update_user.delete_role_ids), + ) + ) + await session.execute(query) + await session.commit() + + # Invalidate all user_data cache entries + cache_manager.invalidate_query('user_data_*') + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'message': 'Updated successfully'} + ), + ) + + +@user_router.get('/users') +@inject +async def get_all_user( + request: Request, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[UserContainer.user_repository] + ), + cache_manager: CacheManager = Depends(Provide[UserContainer.cache_manager]), + search: Optional[str] = Query(None, description='Search by name or email'), + roles: Optional[List[str]] = Query(None, description='Filter by role name'), + limit: int = Query(100), + offset: int = Query(0), + force_fetch: int = Query(0), +): + role_id, _, _ = get_current_user(request) + is_admin = await check_is_admin(role_id) + + if not is_admin: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse('Access denied'), + ) + # checking the cache for the key + cache_key = f'user_data_{offset}_{limit}_{search}_{roles}' + if not force_fetch: + cached_result = cache_manager.get_str(cache_key) + if cached_result: + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'users': json.loads(cached_result)}, + ), + ) + async with user_repository.session() as session: + # Build an aggregated query joining users, user_roles and roles + query = ( + select( + User.id, + User.first_name, + User.last_name, + User.email, + func.array_agg( + func.json_build_object( + 'id', + Role.id, + 'name', + Role.name, + ) + ).label('roles'), + ) + .join(UserRole, User.id == UserRole.user_id) + .join(Role, UserRole.role_id == Role.id) + .where(User.deleted.is_(False)) + .group_by(User.id) + ) + + # Add search conditions + if search and search.strip(): + # for first name and last name search + name = search.split(' ') + filters = [] + if name[0]: + filters.append(User.first_name.ilike(f'%{name[0]}%')) + if len(name) > 1 and name[1]: + filters.append(User.last_name.ilike(f'%{name[1]}%')) + filters.append(User.email.ilike(f'%{search}%')) + query = query.where(or_(*filters)) + + # Add
role filter + if roles: + query = query.where(Role.name.in_(roles)) + + query = query.offset(offset).limit(limit) + + # Execute query + result = await session.execute(query) + rows = result.all() + + # Cache and return result + serialize_result = serialize_values(rows) + cache_manager.add(cache_key, json.dumps(serialize_result), expiry=60 * 60) # 1 hour + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'users': serialize_result}), + ) + + +@user_router.delete('/users') +@inject +async def delete_user( + request: Request, + delete_id: str = Query(alias='id'), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + user_role_repository: SQLAlchemyRepository[UserRole] = Depends( + Provide[UserContainer.user_role_repository] + ), + user_service: UserService = Depends(Provide[UserContainer.user_service]), + cache_manager: CacheManager = Depends(Provide[UserContainer.cache_manager]), +): + role_id, user_id, _ = get_current_user(request) + is_admin = await check_is_admin(role_id) + + if not is_admin: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse('Access denied'), + ) + + admins = await user_role_repository.find(role_id=role_id) + if len(admins) == 1 and user_id == delete_id: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'At least one admin is mandatory, please assign another user as admin before deleting this user.' + ), + ) + + response = await user_service.delete_user(delete_id) + # Invalidate all user_data cache entries + cache_manager.invalidate_query('user_data_*') + + if response: + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'message': 'User deleted successfully.'} + ), + ) + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse('Failed to delete the user.'), + ) + + +@user_router.post('/user/send-reset-password-email') +@inject +async def send_reset_url( + email: str, + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[UserContainer.user_repository] + ), + user_reset_cache: CommonCache = Depends(Provide[CommonContainer.cache_manager]), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + token_service: TokenService = Depends(Provide[AuthContainer.token_service]), + config=Depends(Provide[UserContainer.config]), + email_service: EmailService = Depends(Provide[UserContainer.email_service]), + account_lockout_service: AccountLockoutService = Depends( + Provide[UserContainer.account_lockout_service] + ), +): + try: + # checking if the user exists in the db + user_with_email = await user_repository.find_one(email=email) + if not user_with_email: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + error='No user found with this email ID.' + ), + ) + if user_with_email.deleted: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + error='No user found with this email ID.'
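get_all_user above is a cache-aside read: the key encodes every query parameter, reads consult the cache unless force_fetch is set, and every mutating endpoint calls invalidate_query('user_data_*') afterwards. A self-contained sketch of that contract against a dict-backed stand-in; the real CacheManager lives in db_repo_module and is only assumed here to match this interface:

import fnmatch
import json

class DictCache:
    # Illustrative stand-in for CacheManager; expiry bookkeeping is omitted.
    def __init__(self):
        self._store: dict[str, str] = {}

    def get_str(self, key):
        return self._store.get(key)

    def add(self, key, value, expiry=0):
        self._store[key] = value

    def invalidate_query(self, pattern):
        # Drop every key matching the wildcard, e.g. 'user_data_*'.
        for key in [k for k in self._store if fnmatch.fnmatch(k, pattern)]:
            del self._store[key]

cache = DictCache()
key = f'user_data_{0}_{100}_{None}_{None}'  # mirrors the controller's key scheme
cache.add(key, json.dumps([{'email': 'a@b.co'}]), expiry=60 * 60)
assert cache.get_str(key) is not None       # subsequent reads hit the cache
cache.invalidate_query('user_data_*')       # what update/delete do after a write
assert cache.get_str(key) is None           # next read falls through to the database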
+ ), + ) + + is_locked, locked_until = await account_lockout_service.check_account_lockout( + email + ) + if is_locked: + return create_account_lockout_response( + locked_until, account_lockout_service, response_formatter + ) + + # creating a random reset code and wrapping it in a short-lived JWT + reset_code = secrets.token_hex(16) + + reset_token = token_service.create_token( + payload={'code': reset_code}, + is_temporary=True, + ) + + # mapping the reset code to the user ID in the cache for 10 minutes + user_reset_cache.add(reset_code, str(user_with_email.id), expiry=600) + + # generating the reset URL + forget_url_link = f'{config["web"]["url"]}/reset-password?token={reset_token}' + + # sending the reset email + email_response = email_service.send_forget_password_email( + forget_url_link, email + ) + if email_response: + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'A password reset link has been sent to your registered email address.', + } + ), + ) + else: + logger.error('Error while sending email') + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'An error occurred while sending the email. Please verify your email address and try again later.' + ), + ) + except ValueError: + logger.error('Error in email sending credentials') + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Password reset failed. Please reach out to your administrator for assistance.' + ), + ) + + +@user_router.post('/user/reset-password') +@inject +async def reset_password( + reset_user: ResetUser, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + token_service: TokenService = Depends(Provide[AuthContainer.token_service]), + user_reset_cache: CommonCache = Depends(Provide[CommonContainer.cache_manager]), + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[UserContainer.user_repository] + ), +): + try: + token_claims = token_service.decode_token(reset_user.secret_token) + existing_user_id = user_reset_cache.get_str(token_claims['code']) + if not existing_user_id: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + "Sorry, we couldn't verify your identity, or your password reset link has expired. Please try again or request a new reset link." + ), + ) + hashed_password = hash_password(reset_user.new_password) + await user_repository.find_one_and_update( + {'id': existing_user_id}, password=hashed_password + ) + # removing the reset code from the cache after updating the password + user_reset_cache.remove(token_claims['code']) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'message': 'Your password has been updated successfully.'} + ), + ) + except jwt.ExpiredSignatureError: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse( + 'The password reset link has expired. Please request a new one.'
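The reset flow above never places the user ID in the link: it mints a random hex code, wraps only that code in a short-lived JWT, and stores code-to-user-id in the cache for 600 seconds; reset_password then reverses the steps and deletes the code so a link is single-use. A sketch of the same round trip using PyJWT directly; the secret and the HS256 algorithm are assumptions for illustration, since the patch's TokenService is not shown in this hunk:

import secrets
from datetime import datetime, timedelta, timezone

import jwt  # PyJWT

SECRET = 'illustrative-secret'  # assumption: the real key comes from TokenService config
pending: dict[str, str] = {}    # stands in for the cache entry code -> user_id

def issue_reset_token(user_id: str) -> str:
    code = secrets.token_hex(16)
    pending[code] = user_id     # real code: user_reset_cache.add(code, user_id, expiry=600)
    return jwt.encode(
        {'code': code, 'exp': datetime.now(timezone.utc) + timedelta(minutes=10)},
        SECRET,
        algorithm='HS256',
    )

def redeem_reset_token(token: str) -> str | None:
    # Raises jwt.ExpiredSignatureError after 10 minutes, as in reset_password.
    claims = jwt.decode(token, SECRET, algorithms=['HS256'])
    return pending.pop(claims['code'], None)  # one-shot, like cache.remove()

token = issue_reset_token('user-123')
assert redeem_reset_token(token) == 'user-123'
assert redeem_reset_token(token) is None  # the same link cannot be replayed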
+ ), + ) + + +@user_router.get('/whoami') +@inject +async def get_resources( + request: Request, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[UserContainer.user_repository] + ), +): + _, user_id, _ = get_current_user(request) + user = await user_repository.find_one(id=user_id) + + if not user: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse('User not found'), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'user': user.to_dict()}), + ) + + +@user_router.patch('/users/{user_id}/unblock') +@inject +async def unblock_user( + user_id: str = Path(..., description='User id to unblock'), + request: Request = ..., + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + account_lockout_service: AccountLockoutService = Depends( + Provide[UserContainer.account_lockout_service] + ), +): + role_id, _, _ = get_current_user(request) + is_admin = await check_is_admin(role_id) + + if not is_admin: + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=response_formatter.buildErrorResponse('Access denied'), + ) + + try: + # Attempt to unblock user + success = await account_lockout_service.admin_unblock_user(user_id) + + if not success: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'User with user_id {user_id} not found' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': f'User account with user_id {user_id} has been successfully unblocked' + } + ), + ) + except Exception as e: + logger.error(f'Error unblocking user with user_id {user_id}: {e}') + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + 'Failed to unblock user account' + ), + ) diff --git a/wavefront/server/modules/user_management_module/user_management_module/dependencies/authorization.py b/wavefront/server/modules/user_management_module/user_management_module/dependencies/authorization.py new file mode 100644 index 00000000..b2a43a61 --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/dependencies/authorization.py @@ -0,0 +1,38 @@ +from dataclasses import dataclass +from typing import Annotated + +from fastapi import Depends +from fastapi import HTTPException +from fastapi import Request +from fastapi import status + + +@dataclass +class UserSession: + role: str + user_id: str + + +async def get_current_session(request: Request) -> UserSession: + """Get the current user session from request state""" + session = getattr(request.state, 'session', None) + if not session: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail='Not authenticated' + ) + return session + + +async def check_admin_role( + session: Annotated[UserSession, Depends(get_current_session)], +) -> UserSession: + """Check if the current user has admin role""" + if session.role != 'admin': + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, detail='Admin privileges required' + ) + return session + + +CurrentSession = Annotated[UserSession, Depends(get_current_session)] +AdminSession = Annotated[UserSession, Depends(check_admin_role)] diff --git 
a/wavefront/server/modules/user_management_module/user_management_module/models/oauth_provider.py b/wavefront/server/modules/user_management_module/user_management_module/models/oauth_provider.py new file mode 100644 index 00000000..f41480ed --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/models/oauth_provider.py @@ -0,0 +1,10 @@ +from pydantic import BaseModel + + +class OAuthProviderConfig(BaseModel): + name: str + client_id: str + client_secret: str + redirect_uri: str + client_kwargs: dict + server_metadata_url: str diff --git a/wavefront/server/modules/user_management_module/user_management_module/models/resource.py b/wavefront/server/modules/user_management_module/user_management_module/models/resource.py new file mode 100644 index 00000000..db2468f0 --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/models/resource.py @@ -0,0 +1,68 @@ +from enum import Enum +import json +from typing import List, Optional + +from pydantic import BaseModel +from pydantic import field_validator + + +class AddableResourceScope(str, Enum): + DASHBOARD = 'dashboard' + DATA = 'data' + + +class Resource(BaseModel): + key: str + value: str + description: Optional[str] = None + scope: AddableResourceScope + meta: Optional[str] = None + + @field_validator('meta') + @classmethod + def validate_meta_for_scope( + cls, meta: Optional[str], values: dict + ) -> Optional[str]: + if not meta: + if values.data.get('scope') == AddableResourceScope.DASHBOARD: + raise ValueError('meta is required for dashboard resources') + return meta + + try: + meta_dict = json.loads(meta) + except json.JSONDecodeError: + raise ValueError('meta must be a valid JSON string') + + scope = values.data.get('scope') + if scope == AddableResourceScope.DASHBOARD: + required_fields = ['name', 'key', 'priority'] + if not all(field in meta_dict for field in required_fields): + raise ValueError( + f"Dashboard resources must include {', '.join(required_fields)} in meta" + ) + + return meta + + +class ResourcePayload(BaseModel): + resources: List[Resource] + + +class Role(BaseModel): + id: str + name: str + description: str + + +class CreateRolePayload(BaseModel): + name: str + description: Optional[str] + resources: List[str] + + +class UpdateResourcePayload(BaseModel): + key: Optional[str] = None + value: Optional[str] = None + description: Optional[str] = None + scope: Optional[AddableResourceScope] = None + meta: Optional[str] = None diff --git a/wavefront/server/modules/user_management_module/user_management_module/models/user_schema.py b/wavefront/server/modules/user_management_module/user_management_module/models/user_schema.py new file mode 100644 index 00000000..170041c1 --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/models/user_schema.py @@ -0,0 +1,89 @@ +import re +from typing import List, Optional + +from pydantic import BaseModel +from pydantic import EmailStr +from pydantic import Field +from pydantic import field_validator + +PASSWORD_REGEX = r'^(?=.*[A-Za-z])(?=.*\d)(?=.*[@$!%*#?&])[A-Za-z\d@$!%*#?&]{8,}$' + + +class NewUser(BaseModel): + email: EmailStr = Field(..., max_length=254) # RFC 5321 standard max length + password: str = Field(..., min_length=8) + first_name: Optional[str] = Field(None, min_length=1, max_length=50) + last_name: Optional[str] = Field(None, max_length=50) + team_id: Optional[str] = None + role_id: List[str] = Field(..., min_length=1) + + @field_validator('email') + @classmethod + def 
validate_email_format(cls, v): + # Check for common email patterns + if not re.match(r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$', v): + raise ValueError('Invalid email format') + + # Check for consecutive dots + if '..' in v: + raise ValueError('Email cannot contain consecutive dots') + + # Check for valid domain + domain = v.split('@')[1] + if len(domain.split('.')) < 2: + raise ValueError('Invalid email domain') + + # Check for maximum domain length (255 characters) + if len(domain) > 255: + raise ValueError('Email domain too long') + + # Check for valid TLD length (2-63 characters) + tld = domain.split('.')[-1] + if not 2 <= len(tld) <= 63: + raise ValueError('Invalid TLD length') + + return v.lower() # Normalize email to lowercase + + @field_validator('password') + @classmethod + def validate_password_strength(cls, v): + if not re.match(PASSWORD_REGEX, v): + raise ValueError( + 'Password must contain at least one letter, one number, and one special character' + ) + return v + + @field_validator('first_name') + @classmethod + def validate_name_format(cls, v): + if v is not None: + if not v.replace(' ', '').isalpha(): + raise ValueError('Name should only contain letters and spaces') + return v + + +class UpdateUser(BaseModel): + user_id: str = Field(..., min_length=1) + add_role_ids: Optional[List[str]] = Field(None) + delete_role_ids: Optional[List[str]] = Field(None) + + @field_validator('add_role_ids', 'delete_role_ids') + @classmethod + def validate_role_ids(cls, v): + if v is not None and len(set(v)) != len(v): + raise ValueError('Role IDs must be unique') + return v + + +class ResetUser(BaseModel): + secret_token: str = Field(..., min_length=1) + new_password: str = Field(..., min_length=8) + + @field_validator('new_password') + @classmethod + def validate_password_strength(cls, v): + if not re.match(PASSWORD_REGEX, v): + raise ValueError( + 'Password must contain at least one letter, one number, and one special character' + ) + return v diff --git a/wavefront/server/modules/user_management_module/user_management_module/router.py b/wavefront/server/modules/user_management_module/user_management_module/router.py new file mode 100644 index 00000000..bc073b32 --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/router.py @@ -0,0 +1,15 @@ +from fastapi.routing import APIRouter +from user_management_module.controllers.access_controller import access_router +from user_management_module.controllers.auth_controller import auth_router + +# from user_management_module.controllers.saml_config_controller import saml_router +from user_management_module.controllers.user_controller import user_router +from user_management_module.controllers.auth_plugin_controller import auth_plugin_router + +user_management_router = APIRouter() + +user_management_router.include_router(auth_router) +# user_management_router.include_router(saml_router) +user_management_router.include_router(user_router) +user_management_router.include_router(access_router) +user_management_router.include_router(auth_plugin_router) diff --git a/wavefront/server/modules/user_management_module/user_management_module/services/account_inactivity_service.py b/wavefront/server/modules/user_management_module/user_management_module/services/account_inactivity_service.py new file mode 100644 index 00000000..fe4a7353 --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/services/account_inactivity_service.py @@ -0,0 +1,60 @@ +from datetime import datetime, 
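An aside on PASSWORD_REGEX above: the three lookaheads require at least one letter, one digit, and one special character from the listed set, while the trailing character class both enforces the minimum length of eight and rejects any character outside that alphabet. A few concrete checks, illustrative only:

import re

PASSWORD_REGEX = r'^(?=.*[A-Za-z])(?=.*\d)(?=.*[@$!%*#?&])[A-Za-z\d@$!%*#?&]{8,}$'

assert re.match(PASSWORD_REGEX, 'Passw0rd!')       # letter + digit + special, length 9
assert not re.match(PASSWORD_REGEX, 'password1')   # no special character
assert not re.match(PASSWORD_REGEX, 'Pa0!')        # shorter than eight characters
assert not re.match(PASSWORD_REGEX, 'Pass w0rd!')  # space falls outside the allowed set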
timezone +from typing import Optional, Tuple + +from common_module.log.logger import logger +from db_repo_module.models.user import User +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository + + +class AccountInactivityService: + def __init__( + self, user_repository: SQLAlchemyRepository[User], inactive_days_threshold=60 + ): + self.user_repository = user_repository + self.inactive_days_threshold = ( + int(inactive_days_threshold) if inactive_days_threshold else 60 + ) + + def _ensure_timezone_aware(self, dt: datetime) -> datetime: + """Ensure datetime is timezone-aware (assumes UTC if naive)""" + if dt.tzinfo is None: + return dt.replace(tzinfo=timezone.utc) + return dt + + async def check_account_inactivity(self, user: User) -> Tuple[bool, Optional[int]]: + """ + Check if user account should be disabled due to inactivity. + Returns (should_be_disabled, days_since_last_login) + """ + # If user has never logged in, allow login (first time users) + if not user.last_login_at: + logger.info(f'User {user.email} has never logged in, allowing first login') + return False, None + + current_time = datetime.now(timezone.utc) + last_login_aware = self._ensure_timezone_aware(user.last_login_at) + time_diff = current_time - last_login_aware + days_since_login_precise = time_diff.total_seconds() / ( + 24 * 60 * 60 + ) # Fractional days for comparison + days_since_login_display = int(days_since_login_precise) # Integer for display + + is_inactive = days_since_login_precise > self.inactive_days_threshold + + if is_inactive: + logger.warning( + f'User {user.email} has been inactive for {days_since_login_display} days ' + f'(threshold: {self.inactive_days_threshold} days)' + ) + + return is_inactive, days_since_login_display + + async def update_last_login(self, user: User) -> None: + """Update user's last login timestamp on successful authentication""" + current_time = datetime.now(timezone.utc) + + await self.user_repository.find_one_and_update( + {'id': user.id}, last_login_at=current_time + ) + + logger.info(f'Updated last login timestamp for user {user.email}') diff --git a/wavefront/server/modules/user_management_module/user_management_module/services/account_lockout_service.py b/wavefront/server/modules/user_management_module/user_management_module/services/account_lockout_service.py new file mode 100644 index 00000000..554a63cf --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/services/account_lockout_service.py @@ -0,0 +1,204 @@ +from datetime import datetime, timedelta, timezone +from typing import Optional, Tuple + +from common_module.log.logger import logger +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.models.user import User +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository + + +class AccountLockoutService: + def __init__( + self, + user_repository: SQLAlchemyRepository[User], + cache_manager: CacheManager, + max_failed_attempts=3, + lockout_duration_hours=24, + ): + self.user_repository = user_repository + self.cache_manager = cache_manager + # Convert to int in case they come as strings from config + self.max_failed_attempts = ( + int(max_failed_attempts) if max_failed_attempts else 3 + ) + self.lockout_duration_hours = ( + int(lockout_duration_hours) if lockout_duration_hours else 24 + ) + + def _get_cache_key(self, user_email: str) -> str: + return f'locked:{user_email}' + + def _ensure_timezone_aware(self, dt: datetime) -> datetime: + """Ensure 
datetime is timezone-aware (assumes UTC if naive)""" + if dt.tzinfo is None: + return dt.replace(tzinfo=timezone.utc) + return dt + + def _parse_cached_datetime(self, iso_string: str) -> Optional[datetime]: + """Parse cached ISO datetime string safely""" + try: + return datetime.fromisoformat(iso_string) + except (ValueError, TypeError) as e: + logger.warning(f"Failed to parse cached datetime '{iso_string}': {e}") + return None + + async def check_account_lockout( + self, user_email: str + ) -> Tuple[bool, Optional[datetime]]: + """ + Check if user account is locked. + Returns (is_locked, locked_until_time) + """ + # First check cache for performance + cache_key = self._get_cache_key(user_email) + cached_lockout = self.cache_manager.get_str(cache_key) + + if cached_lockout: + # Parse cached locked_until datetime + cached_locked_until = self._parse_cached_datetime(cached_lockout) + if cached_locked_until: + logger.info( + f'User {user_email} is locked until {cached_locked_until} (from cache)' + ) + return True, cached_locked_until + # If parsing failed, fall through to database check + + # Check database as fallback + user = await self.user_repository.find_one(email=user_email) + if not user: + return False, None + + current_time = datetime.now(timezone.utc) + + if ( + user.locked_until + and self._ensure_timezone_aware(user.locked_until) >= current_time + ): + # User is locked, update cache with remaining time + locked_until_aware = self._ensure_timezone_aware(user.locked_until) + remaining_seconds = int((locked_until_aware - current_time).total_seconds()) + self.cache_manager.add( + cache_key, locked_until_aware.isoformat(), expiry=remaining_seconds + ) + logger.info(f'User {user_email} is locked until {user.locked_until}') + return True, user.locked_until + + return False, None + + async def handle_failed_login(self, user: User) -> Tuple[bool, Optional[datetime]]: + """ + Handle failed login attempt. Returns (is_now_locked, locked_until_time) + """ + + current_time = datetime.now(timezone.utc) + + # Reset attempts if enough time has passed or if this is the first failure + if ( + user.last_failed_attempt is None + or current_time - self._ensure_timezone_aware(user.last_failed_attempt) + > timedelta(hours=self.lockout_duration_hours) + ): + user.failed_attempts = 0 + user.locked_until = None + + # Increment failed attempts + user.failed_attempts += 1 + user.last_failed_attempt = current_time + + # Check if account should be locked + if user.failed_attempts >= self.max_failed_attempts: + user.locked_until = current_time + timedelta( + hours=self.lockout_duration_hours + ) + + # Set cache with lockout + cache_key = self._get_cache_key(user.email) + cache_expiry_seconds = self.lockout_duration_hours * 60 * 60 + self.cache_manager.add( + cache_key, user.locked_until.isoformat(), expiry=cache_expiry_seconds + ) + + await self.user_repository.find_one_and_update( + {'id': user.id}, + failed_attempts=user.failed_attempts, + locked_until=user.locked_until, + last_failed_attempt=user.last_failed_attempt, + ) + + logger.warning( + f'User {user.email} account locked due to {user.failed_attempts} failed attempts' + ) + return True, user.locked_until + + # Update user with new failed attempt count + await self.user_repository.find_one_and_update( + {'id': user.id}, + failed_attempts=user.failed_attempts, + locked_until=user.locked_until, + last_failed_attempt=user.last_failed_attempt, + ) + + logger.info( + f'Failed login for {user.email}. 
Attempts: {user.failed_attempts}/{self.max_failed_attempts}' + ) + return False, None + + async def reset_failed_attempts(self, user: User) -> None: + """Reset failed attempts on successful login""" + + if user.failed_attempts > 0 or user.locked_until or user.last_failed_attempt: + user.failed_attempts = 0 + user.locked_until = None + user.last_failed_attempt = None + await self.user_repository.find_one_and_update( + {'id': user.id}, + failed_attempts=user.failed_attempts, + locked_until=user.locked_until, + last_failed_attempt=user.last_failed_attempt, + ) + + # Clear cache + cache_key = self._get_cache_key(user.email) + self.cache_manager.remove(cache_key) + + logger.info(f'Reset failed attempts for user {user.email}') + + async def _reset_lockout(self, user: User) -> None: + """Internal method to reset lockout status""" + user.failed_attempts = 0 + user.locked_until = None + user.last_failed_attempt = None + await self.user_repository.find_one_and_update( + {'id': user.id}, + failed_attempts=user.failed_attempts, + locked_until=user.locked_until, + last_failed_attempt=user.last_failed_attempt, + ) + + # Clear cache + cache_key = self._get_cache_key(user.email) + self.cache_manager.remove(cache_key) + + def get_lockout_time_remaining(self, locked_until: datetime) -> int: + """Get remaining lockout time in seconds""" + if not locked_until: + return 0 + + current_time = datetime.now(timezone.utc) + locked_until_aware = self._ensure_timezone_aware(locked_until) + + if locked_until_aware <= current_time: + return 0 + + return int((locked_until_aware - current_time).total_seconds()) + + async def admin_unblock_user(self, user_id: str) -> bool: + """Admin method to manually unblock a user account""" + user = await self.user_repository.find_one(id=user_id) + if not user: + return False + + # Reset lockout status + await self._reset_lockout(user) + logger.info(f'Admin unblocked user account: {user_id}') + return True diff --git a/wavefront/server/modules/user_management_module/user_management_module/services/email_service.py b/wavefront/server/modules/user_management_module/user_management_module/services/email_service.py new file mode 100644 index 00000000..e5774f2e --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/services/email_service.py @@ -0,0 +1,198 @@ +from abc import ABC, abstractmethod +from common_module.log.logger import logger +from fastapi import HTTPException +from fastapi import status +import msal +import requests +import base64 +import json +from email.mime.text import MIMEText +from email.mime.multipart import MIMEMultipart +from google.oauth2 import service_account +from googleapiclient.discovery import build + + +class EmailService(ABC): + @abstractmethod + def get_access_token(self): + pass + + @abstractmethod + def send_forget_password_email(self, forget_url_link: str, email: str) -> bool: + pass + + @abstractmethod + def send_email(self, subject: str, body: str, email_id: str) -> bool: + pass + + +class OutlookEmailService(EmailService): + def __init__(self, client_id, client_secret, tenant_id, email_sender): + self.client_id = client_id + self.client_secret = client_secret + self.tenant_id = tenant_id + self.email_sender = email_sender + + def get_access_token(self): + authority = f'https://login.microsoftonline.com/{self.tenant_id}' + app = msal.ConfidentialClientApplication( + self.client_id, self.client_secret, authority + ) + token = app.acquire_token_for_client( + scopes=['https://graph.microsoft.com/.default'] + ) + return 
token.get('access_token') # returns None instead of raising KeyError when acquisition fails + + def send_forget_password_email(self, forget_url_link: str, email: str) -> bool: + access_token = self.get_access_token() + if not access_token: + logger.error('failed to obtain outlook access token') + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Failed to authenticate while sending the email.', + ) + + url = f'https://graph.microsoft.com/v1.0/users/{self.email_sender}/sendMail' + + headers = { + 'Authorization': f'Bearer {access_token}', + 'Content-Type': 'application/json', + } + + email_data = { + 'message': { + 'subject': 'Reset Your Password', + 'body': { + 'contentType': 'HTML', + 'content': f""" +

<p>Hello,</p>
+ <p>We received a request to reset your password. Click the link below to set a new password:</p>
+ <p><a href="{forget_url_link}">Reset Your Password</a></p>
+ <p>Note: This link is valid for 10 minutes. If you do not reset your password within this time, you will need to request a new link.</p>
+ <p>If you did not request this, please contact the administrator immediately.</p>
+ """, + }, + 'toRecipients': [{'emailAddress': {'address': email}}], + } + } + + response = requests.post(url, headers=headers, json=email_data) + return response.status_code == 202 + + def send_email(self, subject: str, body: str, email_id: str) -> bool: + access_token = self.get_access_token() + if not access_token: + logger.error('failed to obtain outlook access token') + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Failed to authenticate while sending the email.', + ) + url = f'https://graph.microsoft.com/v1.0/users/{self.email_sender}/sendMail' + headers = { + 'Authorization': f'Bearer {access_token}', + 'Content-Type': 'application/json', + } + email_data = { + 'message': { + 'subject': subject, + 'body': body, + 'toRecipients': [{'emailAddress': {'address': email_id}}], + } + } + response = requests.post(url, headers=headers, json=email_data) + return response.status_code == 202 + + +class GmailEmailService(EmailService): + def __init__(self, service_account_b64, email_sender, delegate_user): + self.email_sender = email_sender or delegate_user + self.delegate_user = delegate_user + self.scopes = ['https://www.googleapis.com/auth/gmail.send'] + + if not delegate_user: + raise Exception('Delegate user required for gmail') + + try: + decoded_json = base64.b64decode(service_account_b64).decode('utf-8') + self.service_account_info = json.loads(decoded_json) + except Exception as e: + raise Exception(f'Invalid Gmail service account configuration: {str(e)}') + + def get_access_token(self): + credentials = service_account.Credentials.from_service_account_info( + self.service_account_info, scopes=self.scopes + ) + + credentials = credentials.with_subject(self.delegate_user) + + return credentials + + def send_forget_password_email(self, forget_url_link: str, email: str) -> bool: + try: + credentials = self.get_access_token() + if not credentials: + logger.error('failed to obtain gmail access token') + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Failed to authenticate while sending the email.', + ) + service = build('gmail', 'v1', credentials=credentials) + + message = MIMEMultipart() + message['to'] = email + message['from'] = self.email_sender + message['subject'] = 'Reset Your Password' + + html_content = f""" +

<p>Hello,</p>
+ <p>We received a request to reset your password. Click the link below to set a new password:</p>
+ <p><a href="{forget_url_link}">Reset Your Password</a></p>
+ <p>Note: This link is valid for 10 minutes. If you do not reset your password within this time, you will need to request a new link.</p>
+ <p>If you did not request this, please contact the administrator immediately.</p>
+ """ + + html_part = MIMEText(html_content, 'html') + message.attach(html_part) + + raw_message = base64.urlsafe_b64encode(message.as_bytes()).decode() + send_message = ( + service.users() + .messages() + .send(userId='me', body={'raw': raw_message}) + .execute() + ) + + logger.info(f"Gmail message sent successfully: {send_message['id']}") + return True + + except Exception as e: + logger.error(f'Error sending Gmail email: {e}') + return False + + def send_email(self, subject: str, body: str, email_id: str) -> bool: + try: + credentials = self.get_access_token() + if not credentials: + logger.error('failed to obtain gmail access token') + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Failed to authenticate while sending the email.', + ) + service = build('gmail', 'v1', credentials=credentials) + message = MIMEMultipart() + message['to'] = email_id + message['from'] = self.email_sender + message['subject'] = subject + message.attach(MIMEText(body, 'html')) + raw_message = base64.urlsafe_b64encode(message.as_bytes()).decode() + send_message = ( + service.users() + .messages() + .send(userId='me', body={'raw': raw_message}) + .execute() + ) + logger.info(f"Gmail message sent successfully: {send_message['id']}") + return True + except Exception as e: + logger.error(f'Error sending Gmail email: {e}') + return False diff --git a/wavefront/server/modules/user_management_module/user_management_module/services/user_service.py b/wavefront/server/modules/user_management_module/user_management_module/services/user_service.py new file mode 100644 index 00000000..1664bb20 --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/services/user_service.py @@ -0,0 +1,218 @@ +from typing import List, Optional +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from db_repo_module.models.user import User +from db_repo_module.models.user_role import UserRole +from db_repo_module.models.session import Session +from db_repo_module.models.resource import Resource, ResourceScope +from db_repo_module.models.role import Role +from db_repo_module.models.role_resource import RoleResource +from db_repo_module.cache.cache_manager import CacheManager +from sqlalchemy import select, Result, and_ +from common_module.response_formatter import ResponseFormatter +from common_module.log.logger import logger +from user_management_module.utils.password_utils import hash_password +from user_management_module.models.user_schema import NewUser +from fastapi.responses import JSONResponse +from fastapi import status + + +class UserService: + def __init__( + self, + user_repository: SQLAlchemyRepository[User], + user_role_repository: SQLAlchemyRepository[UserRole], + session_repository: SQLAlchemyRepository[Session], + resource_repository: SQLAlchemyRepository[Resource], + cache_manager: CacheManager, + ): + self.user_repository = user_repository + self.user_role_repository = user_role_repository + self.session_repository = session_repository + self.resource_repository = resource_repository + self.cache_manager = cache_manager + + async def get_user_resources( + self, + user_id: str, + scope: Optional[ResourceScope] = None, + scopes: Optional[List[ResourceScope]] = None, + ) -> List[Resource]: + """ + Fetch all resources accessible to a user based on their roles. 
+ + Args: + user_id: The ID of the user + scope: Single scope to filter by (optional) + scopes: Multiple scopes to filter by (optional) + + Returns: + List of Resource objects the user has access to + """ + async with self.resource_repository.session() as session: + statement = ( + select(Resource) + .join(RoleResource, Resource.id == RoleResource.resource_id) + .join(Role, Role.id == RoleResource.role_id) + .join(UserRole, UserRole.role_id == Role.id) + .join(User, UserRole.user_id == User.id) + .where(UserRole.user_id == user_id) + .where(User.deleted.is_(False)) + ) + + # Apply scope filtering + if scope is not None: + statement = statement.where(Resource.scope == scope) + elif scopes is not None: + statement = statement.where(Resource.scope.in_(scopes)) + + result: Result = await session.execute(statement) + return result.scalars().all() + + async def get_user_role_for_scope( + self, user_id: str, scope: ResourceScope + ) -> Optional[str]: + """ + Get the user's role ID for a specific resource scope. + + Args: + user_id: The ID of the user + scope: The resource scope to check (usually ResourceScope.CONSOLE) + + Returns: + The role_id if user has access to the scope, None otherwise + """ + async with self.resource_repository.session() as session: + statement = ( + select(UserRole.role_id) + .join(Role, UserRole.role_id == Role.id) + .join(RoleResource, Role.id == RoleResource.role_id) + .join(Resource, RoleResource.resource_id == Resource.id) + .join(User, UserRole.user_id == User.id) + .where(UserRole.user_id == user_id) + .where(User.deleted.is_(False)) + .where(Resource.scope == scope) + ) + result: Result = await session.execute(statement) + return result.scalar() + + async def delete_user(self, user_id: str) -> bool: + await self.user_role_repository.delete_all(user_id=user_id) + + sessions = await self.session_repository.find(user_id=user_id, limit=1000) + for s in sessions: + self.cache_manager.remove(f'session_{s.id}') + + self.cache_manager.remove(user_id) + + await self.session_repository.delete_all(user_id=user_id) + + response = await self.user_repository.find_one_and_update( + {'id': user_id}, deleted=True + ) + return response is not None + + async def reactivate_user( + self, + existing_user: User, + new_user_data: NewUser, + current_admin_role_id: str, + response_formatter: ResponseFormatter, + ) -> JSONResponse: + try: + async with self.user_repository.session() as session: + # Validate roles first + role_query = select(Role).where(Role.id.in_(new_user_data.role_id)) + role_result = await session.execute(role_query) + existing_roles = role_result.scalars().all() + existing_role_ids = {str(role.id) for role in existing_roles} + + invalid_roles = set(new_user_data.role_id) - existing_role_ids + if invalid_roles: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Invalid role IDs: {", ".join(invalid_roles)}' + ), + ) + + # Validate console resource requirement + console_resources_query = ( + select(Resource) + .join(RoleResource, Resource.id == RoleResource.resource_id) + .where( + and_( + RoleResource.role_id.in_(new_user_data.role_id), + Resource.scope == ResourceScope.CONSOLE, + ) + ) + ) + console_result = await session.execute(console_resources_query) + console_resources = console_result.scalars().all() + if len(console_resources) == 0: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'At least one console resource is mandatory' + ), +
) + + user_updates = { + 'deleted': False, + 'password': hash_password(new_user_data.password), + 'first_name': new_user_data.first_name, + 'last_name': new_user_data.last_name, + 'failed_attempts': 0, + 'locked_until': None, + 'last_failed_attempt': None, + 'last_login_at': None, + } + + updated_user = await self.user_repository.find_one_and_update( + {'id': existing_user.id}, **user_updates + ) + + if not updated_user: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + 'Failed to update user' + ), + ) + + # Handle role assignments + if ( + current_admin_role_id in new_user_data.role_id + ): # Is creating admin user + all_roles = await session.execute(select(Role)) + all_roles_list = all_roles.scalars().all() + user_roles = [ + UserRole(user_id=existing_user.id, role_id=role.id) + for role in all_roles_list + ] + else: # Is creating user with specific roles + user_roles = [ + UserRole(user_id=existing_user.id, role_id=role_id) + for role_id in new_user_data.role_id + ] + + session.add_all(user_roles) + await session.commit() + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'User account reactivated successfully', + 'user_id': str(existing_user.id), + } + ), + ) + + except Exception as e: + logger.error(f'Failed to reactivate user {existing_user.id}: {str(e)}') + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Failed to reactivate user: {str(e)}' + ), + ) diff --git a/wavefront/server/modules/user_management_module/user_management_module/user_container.py b/wavefront/server/modules/user_management_module/user_management_module/user_container.py new file mode 100644 index 00000000..72496d54 --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/user_container.py @@ -0,0 +1,99 @@ +from db_repo_module.models.resource import Resource +from db_repo_module.models.role import Role +from db_repo_module.models.role_resource import RoleResource +from db_repo_module.models.session import Session +from db_repo_module.models.user import User +from db_repo_module.models.user_role import UserRole +from db_repo_module.models.auth_secrets import AuthSecrets +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from dependency_injector import containers +from dependency_injector import providers +from user_management_module.services.user_service import UserService +from user_management_module.services.email_service import ( + OutlookEmailService, + GmailEmailService, +) +from user_management_module.services.account_lockout_service import ( + AccountLockoutService, +) +from user_management_module.services.account_inactivity_service import ( + AccountInactivityService, +) + + +class UserContainer(containers.DeclarativeContainer): + config = providers.Configuration(ini_files=['config.ini']) + db_client = providers.Dependency() + cache_manager = providers.Dependency() + user_repository = providers.Singleton( + SQLAlchemyRepository[User], model=User, db_client=db_client + ) + role_repository = providers.Singleton( + SQLAlchemyRepository[Role], model=Role, db_client=db_client + ) + resource_repository = providers.Singleton( + SQLAlchemyRepository[Resource], + model=Resource, + db_client=db_client, + ) + role_resource_repository = providers.Singleton( + SQLAlchemyRepository[RoleResource], + model=RoleResource, + 
db_client=db_client, + ) + user_role_repository = providers.Singleton( + SQLAlchemyRepository[UserRole], + model=UserRole, + db_client=db_client, + ) + session_repository = providers.Singleton( + SQLAlchemyRepository[Session], + model=Session, + db_client=db_client, + ) + + auth_secrets_repository = providers.Singleton( + SQLAlchemyRepository[AuthSecrets], + model=AuthSecrets, + db_client=db_client, + ) + + email_service = providers.Selector( + selector=config.email.email_provider, + outlook=providers.Singleton( + OutlookEmailService, + client_id=config.outlook.client_id, + client_secret=config.outlook.client_secret, + tenant_id=config.outlook.tenant_id, + email_sender=config.outlook.email_id, + ), + gmail=providers.Singleton( + GmailEmailService, + service_account_b64=config.gmail.service_account_file, + email_sender=config.gmail.email_sender, + delegate_user=config.gmail.delegate_user, + ), + ) + + user_service = providers.Singleton( + UserService, + user_repository=user_repository, + user_role_repository=user_role_repository, + session_repository=session_repository, + resource_repository=resource_repository, + cache_manager=cache_manager, + ) + + account_lockout_service = providers.Singleton( + AccountLockoutService, + user_repository=user_repository, + cache_manager=cache_manager, + max_failed_attempts=config.auth.max_failed_attempts, + lockout_duration_hours=config.auth.lockout_duration_hours, + ) + + account_inactivity_service = providers.Singleton( + AccountInactivityService, + user_repository=user_repository, + inactive_days_threshold=config.auth.inactive_days_threshold, + ) diff --git a/wavefront/server/modules/user_management_module/user_management_module/utils/password_utils.py b/wavefront/server/modules/user_management_module/user_management_module/utils/password_utils.py new file mode 100644 index 00000000..3bc6f837 --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/utils/password_utils.py @@ -0,0 +1,13 @@ +import bcrypt + + +def hash_password(password: str) -> str: + salt = bcrypt.gensalt() + hashed = bcrypt.hashpw(password.encode('utf-8'), salt) + return hashed.decode('utf-8') + + +def verify_password(plain_password: str, hashed_password: str) -> bool: + return bcrypt.checkpw( + plain_password.encode('utf-8'), hashed_password.encode('utf-8') + ) diff --git a/wavefront/server/modules/user_management_module/user_management_module/utils/user_utils.py b/wavefront/server/modules/user_management_module/user_management_module/utils/user_utils.py new file mode 100644 index 00000000..3bc7b72a --- /dev/null +++ b/wavefront/server/modules/user_management_module/user_management_module/utils/user_utils.py @@ -0,0 +1,80 @@ +from datetime import datetime +from typing import Optional + +from common_module.response_formatter import ResponseFormatter +import uuid +from typing import Union +from db_repo_module.models.role import Role +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from dependency_injector.wiring import inject +from dependency_injector.wiring import Provide +from fastapi import Request +from fastapi import status +from fastapi.params import Depends +from fastapi.responses import JSONResponse +from user_management_module.services.account_lockout_service import ( + AccountLockoutService, +) +from user_management_module.user_container import UserContainer + + +def get_current_user(req: Request): + # Guard the whole tuple: if there is no session, none of these attributes exist + if not (hasattr(req.state, 'session') and req.state.session): + return None, None, None + return ( + req.state.session.role_id, + req.state.session.user_id, + req.state.session.session_id, + ) + + +@inject +async def check_is_admin( + role_id: str, + role_repository: SQLAlchemyRepository[Role] = Depends( + Provide[UserContainer.role_repository] + ), +) -> bool: + role = await role_repository.find_one(id=role_id) + + if not role: + return False + + return role.name == 'admin' + + +def create_account_lockout_response( + locked_until: Optional[datetime], + account_lockout_service: AccountLockoutService, + response_formatter: ResponseFormatter, +) -> JSONResponse: + """ + Create a standardized account lockout response with remaining time information. + + Args: + locked_until: The datetime until which the account is locked + account_lockout_service: Service for calculating lockout time + response_formatter: Service for formatting API responses + + Returns: + JSONResponse with HTTP 423 status and lockout message + """ + if locked_until: + remaining_time = account_lockout_service.get_lockout_time_remaining( + locked_until + ) + hours = remaining_time // 3600 + minutes = (remaining_time % 3600) // 60 + time_msg = f'{hours}h {minutes}m' if hours > 0 else f'{minutes}m' + error_message = f'Account locked due to multiple failed login attempts. Try again in {time_msg}' + else: + error_message = 'Account locked due to multiple failed login attempts' + + return JSONResponse( + status_code=status.HTTP_423_LOCKED, + content=response_formatter.buildErrorResponse(error_message), + ) + + +def get_session_cache_key(session_id: Union[str, uuid.UUID]) -> str: + return f'session_{str(session_id)}' diff --git a/wavefront/server/modules/voice_agents_module/pyproject.toml b/wavefront/server/modules/voice_agents_module/pyproject.toml new file mode 100644 index 00000000..0ee695d4 --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/pyproject.toml @@ -0,0 +1,34 @@ +[project] +name = "voice-agents-module" +version = "0.1.0" +description = "Voice agents module for configuration management" +authors = [ + { name = "rootflo engineering", email = "engineering@rootflo.ai" } +] +requires-python = ">=3.11" + +dependencies = [ + "common-module", + "db-repo-module", + "flo-cloud", + "twilio", + "httpx>=0.27.0,<1.0.0", +] + +[tool.uv.sources] +common-module = { workspace = true } +db-repo-module = { workspace = true } +flo-cloud = { workspace = true } + +[tool.pytest.ini_options] +asyncio_mode = "auto" + +[tool.uv] +package = true + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["voice_agents_module"] diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/stt_config_controller.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/stt_config_controller.py new file mode 100644 index 00000000..484f32f8 --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/stt_config_controller.py @@ -0,0 +1,250 @@ +import json +from uuid import UUID + +from dependency_injector.wiring import Provide, inject +from fastapi import APIRouter, Depends, Path, Request, status +from fastapi.responses import JSONResponse + +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from voice_agents_module.models.stt_schemas import ( + CreateSttConfigPayload, + UpdateSttConfigPayload, + SttProvider, + UNSET, +) +from voice_agents_module.services.stt_config_service import SttConfigService +from
voice_agents_module.voice_agents_container import VoiceAgentsContainer + +stt_config_router = APIRouter() + + +@stt_config_router.post('/v1/stt-configs') +@inject +async def create_stt_config( + request: Request, + payload: CreateSttConfigPayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + stt_config_service: SttConfigService = Depends( + Provide[VoiceAgentsContainer.stt_config_service] + ), +): + """ + Create a new STT configuration + + Creates a Speech-to-Text provider configuration. + + Args: + payload: Configuration details including provider, api_key, etc. + + Returns: + JSONResponse: Created configuration (api_key excluded) + """ + config = await stt_config_service.create_config( + display_name=payload.display_name, + description=payload.description, + provider=payload.provider.value, + api_key=payload.api_key, + language=payload.language, + parameters=payload.parameters, + ) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'STT configuration created successfully', + 'stt_config_id': str(config['id']), + } + ), + ) + + +@stt_config_router.get('/v1/stt-configs') +@inject +async def list_stt_configs( + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + stt_config_service: SttConfigService = Depends( + Provide[VoiceAgentsContainer.stt_config_service] + ), +): + """ + List all STT configurations + + Returns: + JSONResponse: List of configurations (api_key excluded) + """ + configs_data = await stt_config_service.list_configs() + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'stt_configs': configs_data}), + ) + + +@stt_config_router.get('/v1/stt-configs/{config_id}') +@inject +async def get_stt_config( + config_id: UUID = Path(..., description='The ID of the STT configuration'), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + stt_config_service: SttConfigService = Depends( + Provide[VoiceAgentsContainer.stt_config_service] + ), +): + """ + Get a single STT configuration by ID + + Args: + config_id: UUID of the configuration to retrieve + + Returns: + JSONResponse: Configuration details (api_key excluded) + """ + config_dict = await stt_config_service.get_config(config_id) + + if not config_dict: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'STT configuration not found with id: {config_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse(config_dict), + ) + + +@stt_config_router.put('/v1/stt-configs/{config_id}') +@inject +async def update_stt_config( + config_id: UUID = Path(..., description='The ID of the STT configuration'), + payload: UpdateSttConfigPayload = ..., + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + stt_config_service: SttConfigService = Depends( + Provide[VoiceAgentsContainer.stt_config_service] + ), +): + """ + Update an STT configuration + + Updates specified fields of an STT configuration. + Only provided fields will be updated. 
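The provider field in the handler below accepts either an SttProvider member or a raw string and turns anything else into a 400. A standalone sketch of that normalization; the enum members used here are invented for illustration, the real ones live in stt_schemas:

from enum import Enum

class SttProvider(str, Enum):
    # Hypothetical members; this hunk does not show the real enum values.
    DEEPGRAM = 'deepgram'
    WHISPER = 'whisper'

def normalize_provider(value):
    if hasattr(value, 'value'):
        return value.value  # already an enum member
    if isinstance(value, str) and value in [e.value for e in SttProvider]:
        return value        # a valid raw enum value string
    return None             # the endpoint maps this to HTTP 400

assert normalize_provider(SttProvider.WHISPER) == 'whisper'
assert normalize_provider('deepgram') == 'deepgram'
assert normalize_provider('bogus') is None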
+ + Args: + config_id: UUID of the configuration to update + payload: Fields to update + + Returns: + JSONResponse: Success message + """ + # Build update dict (only include set fields) + update_data = {} + if payload.display_name is not UNSET: + update_data['display_name'] = payload.display_name + if payload.description is not UNSET: + update_data['description'] = payload.description + if payload.provider is not UNSET: + if hasattr(payload.provider, 'value'): + # It's an enum object + update_data['provider'] = payload.provider.value + elif isinstance(payload.provider, str) and payload.provider in [ + e.value for e in SttProvider + ]: + # It's a valid enum value string + update_data['provider'] = payload.provider + else: + # Invalid value + valid_values = [e.value for e in SttProvider] + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Invalid provider value. Must be one of: {valid_values}' + ), + ) + if payload.api_key is not UNSET: + update_data['api_key'] = payload.api_key + if payload.language is not UNSET: + update_data['language'] = payload.language + if payload.parameters is not UNSET: + update_data['parameters'] = ( + json.dumps(payload.parameters) if payload.parameters else None + ) + + if not update_data: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse('No fields to update'), + ) + + updated_config = await stt_config_service.update_config(config_id, **update_data) + + if not updated_config: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'STT configuration not found with id: {config_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'STT configuration updated successfully', + 'stt_config_id': str(config_id), + } + ), + ) + + +@stt_config_router.delete('/v1/stt-configs/{config_id}') +@inject +async def delete_stt_config( + config_id: UUID = Path(..., description='The ID of the STT configuration'), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + stt_config_service: SttConfigService = Depends( + Provide[VoiceAgentsContainer.stt_config_service] + ), +): + """ + Delete an STT configuration (soft delete) + + Marks the configuration as deleted (sets is_deleted=True). 
+ + Args: + config_id: UUID of the configuration to delete + + Returns: + JSONResponse: Success message + """ + deleted = await stt_config_service.delete_config(config_id) + + if not deleted: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'STT configuration not found with id: {config_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'STT configuration deleted successfully', + 'stt_config_id': str(config_id), + } + ), + ) diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/telephony_config_controller.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/telephony_config_controller.py new file mode 100644 index 00000000..3d41a2ab --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/telephony_config_controller.py @@ -0,0 +1,283 @@ +import json +from uuid import UUID + +from dependency_injector.wiring import Provide, inject +from fastapi import APIRouter, Depends, Path, Request, status +from fastapi.responses import JSONResponse + +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from voice_agents_module.models.telephony_schemas import ( + CreateTelephonyConfigPayload, + UpdateTelephonyConfigPayload, + TelephonyProvider, + ConnectionType, + UNSET, +) +from voice_agents_module.services.telephony_config_service import TelephonyConfigService +from voice_agents_module.voice_agents_container import VoiceAgentsContainer + +telephony_config_router = APIRouter() + + +@telephony_config_router.post('/v1/telephony-configs') +@inject +async def create_telephony_config( + request: Request, + payload: CreateTelephonyConfigPayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + telephony_config_service: TelephonyConfigService = Depends( + Provide[VoiceAgentsContainer.telephony_config_service] + ), +): + """ + Create a new telephony configuration + + Creates a telephony provider configuration (Twilio) with connection settings. + For WebSocket connections, webhook_config with status_callback_url is required. + For SIP connections, sip_config with sip_domain is required. + + Args: + payload: Configuration details including provider, connection_type, credentials, etc. 
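+            An illustrative WebSocket payload (placeholder values):
+            {
+                "display_name": "Twilio Prod",
+                "provider": "twilio",
+                "connection_type": "websocket",
+                "credentials": {"account_sid": "AC...", "auth_token": "..."},
+                "phone_numbers": ["+1234567890"],
+                "webhook_config": {
+                    "status_callback_url": "https://example.com/webhooks/call-status"
+                }
+            }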
+ + Returns: + JSONResponse: Created configuration (credentials excluded) + """ + config = await telephony_config_service.create_config( + display_name=payload.display_name, + description=payload.description, + provider=payload.provider.value, + connection_type=payload.connection_type.value, + credentials=payload.credentials, + phone_numbers=payload.phone_numbers, + webhook_config=payload.webhook_config, + sip_config=payload.sip_config, + ) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Telephony configuration created successfully', + 'telephony_config_id': str(config['id']), + } + ), + ) + + +@telephony_config_router.get('/v1/telephony-configs') +@inject +async def list_telephony_configs( + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + telephony_config_service: TelephonyConfigService = Depends( + Provide[VoiceAgentsContainer.telephony_config_service] + ), +): + """ + List all telephony configurations + + Returns: + JSONResponse: List of configurations (credentials excluded) + """ + configs_data = await telephony_config_service.list_configs() + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'telephony_configs': configs_data} + ), + ) + + +@telephony_config_router.get('/v1/telephony-configs/{config_id}') +@inject +async def get_telephony_config( + config_id: UUID = Path(..., description='The ID of the telephony configuration'), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + telephony_config_service: TelephonyConfigService = Depends( + Provide[VoiceAgentsContainer.telephony_config_service] + ), +): + """ + Get a single telephony configuration by ID + + Args: + config_id: UUID of the configuration to retrieve + + Returns: + JSONResponse: Configuration details (credentials excluded) + """ + config_dict = await telephony_config_service.get_config(config_id) + + if not config_dict: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Telephony configuration not found with id: {config_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse(config_dict), + ) + + +@telephony_config_router.put('/v1/telephony-configs/{config_id}') +@inject +async def update_telephony_config( + config_id: UUID = Path(..., description='The ID of the telephony configuration'), + payload: UpdateTelephonyConfigPayload = ..., + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + telephony_config_service: TelephonyConfigService = Depends( + Provide[VoiceAgentsContainer.telephony_config_service] + ), +): + """ + Update a telephony configuration + + Updates specified fields of a telephony configuration. + Only provided fields will be updated. 
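+    Note that the merged result is re-validated: switching connection_type
+    to 'sip' is rejected unless a sip_config is supplied in this request or
+    is already stored on the configuration.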
+ + Args: + config_id: UUID of the configuration to update + payload: Fields to update + + Returns: + JSONResponse: Success message + """ + # Build update dict (only include set fields) + update_data = {} + if payload.display_name is not UNSET: + update_data['display_name'] = payload.display_name + if payload.description is not UNSET: + update_data['description'] = payload.description + if payload.provider is not UNSET: + if hasattr(payload.provider, 'value'): + # It's an enum object + update_data['provider'] = payload.provider.value + elif isinstance(payload.provider, str) and payload.provider in [ + e.value for e in TelephonyProvider + ]: + # It's a valid enum value string + update_data['provider'] = payload.provider + else: + # Invalid value + valid_values = [e.value for e in TelephonyProvider] + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Invalid provider value. Must be one of: {valid_values}' + ), + ) + if payload.connection_type is not UNSET: + if hasattr(payload.connection_type, 'value'): + # It's an enum object + update_data['connection_type'] = payload.connection_type.value + elif isinstance(payload.connection_type, str) and payload.connection_type in [ + e.value for e in ConnectionType + ]: + # It's a valid enum value string + update_data['connection_type'] = payload.connection_type + else: + # Invalid value + valid_values = [e.value for e in ConnectionType] + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Invalid connection_type value. Must be one of: {valid_values}' + ), + ) + if payload.credentials is not UNSET: + update_data['credentials'] = json.dumps(payload.credentials) + if payload.phone_numbers is not UNSET: + update_data['phone_numbers'] = json.dumps(payload.phone_numbers) + if payload.webhook_config is not UNSET: + update_data['webhook_config'] = ( + json.dumps(payload.webhook_config.model_dump()) + if payload.webhook_config + else None + ) + if payload.sip_config is not UNSET: + update_data['sip_config'] = ( + json.dumps(payload.sip_config.model_dump()) if payload.sip_config else None + ) + + if not update_data: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse('No fields to update'), + ) + + updated_config = await telephony_config_service.update_config( + config_id, **update_data + ) + + if not updated_config: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Telephony configuration not found with id: {config_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Telephony configuration updated successfully', + 'telephony_config_id': str(config_id), + } + ), + ) + + +@telephony_config_router.delete('/v1/telephony-configs/{config_id}') +@inject +async def delete_telephony_config( + config_id: UUID = Path(..., description='The ID of the telephony configuration'), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + telephony_config_service: TelephonyConfigService = Depends( + Provide[VoiceAgentsContainer.telephony_config_service] + ), +): + """ + Delete a telephony configuration (soft delete) + + Marks the configuration as deleted (sets is_deleted=True). 
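+    Deleting also drops the per-config and list cache entries and notifies
+    the call_processing service so its cached copy is invalidated.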
+ + Args: + config_id: UUID of the configuration to delete + + Returns: + JSONResponse: Success message + """ + deleted = await telephony_config_service.delete_config(config_id) + + if not deleted: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Telephony configuration not found with id: {config_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Telephony configuration deleted successfully', + 'telephony_config_id': str(config_id), + } + ), + ) diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/tts_config_controller.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/tts_config_controller.py new file mode 100644 index 00000000..b4801dfb --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/tts_config_controller.py @@ -0,0 +1,253 @@ +import json +from uuid import UUID + +from dependency_injector.wiring import Provide, inject +from fastapi import APIRouter, Depends, Path, Request, status +from fastapi.responses import JSONResponse + +from common_module.common_container import CommonContainer +from common_module.response_formatter import ResponseFormatter +from voice_agents_module.models.tts_schemas import ( + CreateTtsConfigPayload, + UpdateTtsConfigPayload, + TtsProvider, + UNSET, +) +from voice_agents_module.services.tts_config_service import TtsConfigService +from voice_agents_module.voice_agents_container import VoiceAgentsContainer + +tts_config_router = APIRouter() + + +@tts_config_router.post('/v1/tts-configs') +@inject +async def create_tts_config( + request: Request, + payload: CreateTtsConfigPayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + tts_config_service: TtsConfigService = Depends( + Provide[VoiceAgentsContainer.tts_config_service] + ), +): + """ + Create a new TTS configuration + + Creates a Text-to-Speech provider configuration with voice settings. + + Args: + payload: Configuration details including provider, voice_id, api_key, etc. 
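+            An illustrative payload (placeholder values; parameters are
+            provider-specific):
+            {
+                "display_name": "Support voice",
+                "provider": "elevenlabs",
+                "voice_id": "voice-123",
+                "api_key": "...",
+                "language": "en",
+                "parameters": {"stability": 0.5}
+            }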
+ + Returns: + JSONResponse: Created configuration (api_key excluded) + """ + config = await tts_config_service.create_config( + display_name=payload.display_name, + description=payload.description, + provider=payload.provider.value, + voice_id=payload.voice_id, + api_key=payload.api_key, + language=payload.language, + parameters=payload.parameters, + ) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'TTS configuration created successfully', + 'tts_config_id': str(config['id']), + } + ), + ) + + +@tts_config_router.get('/v1/tts-configs') +@inject +async def list_tts_configs( + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + tts_config_service: TtsConfigService = Depends( + Provide[VoiceAgentsContainer.tts_config_service] + ), +): + """ + List all TTS configurations + + Returns: + JSONResponse: List of configurations (api_key excluded) + """ + configs_data = await tts_config_service.list_configs() + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'tts_configs': configs_data}), + ) + + +@tts_config_router.get('/v1/tts-configs/{config_id}') +@inject +async def get_tts_config( + config_id: UUID = Path(..., description='The ID of the TTS configuration'), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + tts_config_service: TtsConfigService = Depends( + Provide[VoiceAgentsContainer.tts_config_service] + ), +): + """ + Get a single TTS configuration by ID + + Args: + config_id: UUID of the configuration to retrieve + + Returns: + JSONResponse: Configuration details (api_key excluded) + """ + config_dict = await tts_config_service.get_config(config_id) + + if not config_dict: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'TTS configuration not found with id: {config_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse(config_dict), + ) + + +@tts_config_router.put('/v1/tts-configs/{config_id}') +@inject +async def update_tts_config( + config_id: UUID = Path(..., description='The ID of the TTS configuration'), + payload: UpdateTtsConfigPayload = ..., + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + tts_config_service: TtsConfigService = Depends( + Provide[VoiceAgentsContainer.tts_config_service] + ), +): + """ + Update a TTS configuration + + Updates specified fields of a TTS configuration. + Only provided fields will be updated. 
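+    The provider field accepts either the enum member or its string value;
+    any other value yields a 400 listing the valid providers.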
+ + Args: + config_id: UUID of the configuration to update + payload: Fields to update + + Returns: + JSONResponse: Success message + """ + # Build update dict (only include set fields) + update_data = {} + if payload.display_name is not UNSET: + update_data['display_name'] = payload.display_name + if payload.description is not UNSET: + update_data['description'] = payload.description + if payload.provider is not UNSET: + if hasattr(payload.provider, 'value'): + # It's an enum object + update_data['provider'] = payload.provider.value + elif isinstance(payload.provider, str) and payload.provider in [ + e.value for e in TtsProvider + ]: + # It's a valid enum value string + update_data['provider'] = payload.provider + else: + # Invalid value + valid_values = [e.value for e in TtsProvider] + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Invalid provider value. Must be one of: {valid_values}' + ), + ) + if payload.voice_id is not UNSET: + update_data['voice_id'] = payload.voice_id + if payload.api_key is not UNSET: + update_data['api_key'] = payload.api_key + if payload.language is not UNSET: + update_data['language'] = payload.language + if payload.parameters is not UNSET: + update_data['parameters'] = ( + json.dumps(payload.parameters) if payload.parameters else None + ) + + if not update_data: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse('No fields to update'), + ) + + updated_config = await tts_config_service.update_config(config_id, **update_data) + + if not updated_config: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'TTS configuration not found with id: {config_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'TTS configuration updated successfully', + 'tts_config_id': str(config_id), + } + ), + ) + + +@tts_config_router.delete('/v1/tts-configs/{config_id}') +@inject +async def delete_tts_config( + config_id: UUID = Path(..., description='The ID of the TTS configuration'), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + tts_config_service: TtsConfigService = Depends( + Provide[VoiceAgentsContainer.tts_config_service] + ), +): + """ + Delete a TTS configuration (soft delete) + + Marks the configuration as deleted (sets is_deleted=True). 
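+    Existing voice agents that reference this configuration are not
+    modified by this call; only the config row is flagged is_deleted.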
+ + Args: + config_id: UUID of the configuration to delete + + Returns: + JSONResponse: Success message + """ + deleted = await tts_config_service.delete_config(config_id) + + if not deleted: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'TTS configuration not found with id: {config_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'TTS configuration deleted successfully', + 'tts_config_id': str(config_id), + } + ), + ) diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/voice_agent_controller.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/voice_agent_controller.py new file mode 100644 index 00000000..6d0bdf15 --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/controllers/voice_agent_controller.py @@ -0,0 +1,415 @@ +import json +from datetime import datetime +from uuid import UUID + +from dependency_injector.wiring import Provide, inject +from fastapi import APIRouter, Depends, Path, Request, status +from fastapi.responses import JSONResponse + +from common_module.common_container import CommonContainer +from common_module.log.logger import logger +from common_module.response_formatter import ResponseFormatter +from voice_agents_module.models.voice_agent_schemas import ( + CreateVoiceAgentPayload, + UpdateVoiceAgentPayload, + VoiceAgentStatus, + UNSET, + InitiateCallPayload, +) +from voice_agents_module.services.voice_agent_service import VoiceAgentService +from voice_agents_module.services.twilio_service import TwilioService +from voice_agents_module.voice_agents_container import VoiceAgentsContainer + +voice_agent_router = APIRouter() + + +@voice_agent_router.post('/v1/voice-agents') +@inject +async def create_voice_agent( + request: Request, + payload: CreateVoiceAgentPayload, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + voice_agent_service: VoiceAgentService = Depends( + Provide[VoiceAgentsContainer.voice_agent_service] + ), +): + """ + Create a new voice agent + + Creates a voice agent with configurations for LLM, TTS, STT, and telephony. 
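+    Agents default to 'inactive' unless the payload sets status to
+    'active'; only active agents may initiate outbound calls.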
+ + Args: + payload: Voice agent details including name, configs, and system prompt + + Returns: + JSONResponse: Created voice agent details + """ + agent = await voice_agent_service.create_agent( + name=payload.name, + description=payload.description, + llm_config_id=payload.llm_config_id, + tts_config_id=payload.tts_config_id, + stt_config_id=payload.stt_config_id, + telephony_config_id=payload.telephony_config_id, + system_prompt=payload.system_prompt, + welcome_message=payload.welcome_message, + conversation_config=payload.conversation_config, + status=payload.status.value, + ) + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Voice agent created successfully', + 'voice_agent': agent, + } + ), + ) + + +@voice_agent_router.get('/v1/voice-agents') +@inject +async def list_voice_agents( + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + voice_agent_service: VoiceAgentService = Depends( + Provide[VoiceAgentsContainer.voice_agent_service] + ), +): + """ + List all voice agents + + Returns: + JSONResponse: List of all voice agents + """ + agents_data = await voice_agent_service.list_agents() + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'voice_agents': agents_data}), + ) + + +@voice_agent_router.get('/v1/voice-agents/{agent_id}') +@inject +async def get_voice_agent( + agent_id: UUID = Path(..., description='The ID of the voice agent'), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + voice_agent_service: VoiceAgentService = Depends( + Provide[VoiceAgentsContainer.voice_agent_service] + ), +): + """ + Get a single voice agent by ID + + Args: + agent_id: UUID of the voice agent to retrieve + + Returns: + JSONResponse: Voice agent details + """ + agent_dict = await voice_agent_service.get_agent(agent_id) + + if not agent_dict: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Voice agent not found with id: {agent_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse(agent_dict), + ) + + +@voice_agent_router.patch('/v1/voice-agents/{agent_id}') +@inject +async def update_voice_agent( + agent_id: UUID = Path(..., description='The ID of the voice agent'), + payload: UpdateVoiceAgentPayload = ..., + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + voice_agent_service: VoiceAgentService = Depends( + Provide[VoiceAgentsContainer.voice_agent_service] + ), +): + """ + Update a voice agent + + Updates specified fields of a voice agent. + Only provided fields will be updated. 
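+    Passing an unrecognised status value returns a 400 listing the valid
+    values ('active', 'inactive').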
+ + Args: + agent_id: UUID of the voice agent to update + payload: Fields to update + + Returns: + JSONResponse: Success message + """ + # Build update dict (only include set fields) + update_data = {} + + if payload.name is not UNSET: + update_data['name'] = payload.name + if payload.description is not UNSET: + update_data['description'] = payload.description + if payload.llm_config_id is not UNSET: + update_data['llm_config_id'] = payload.llm_config_id + if payload.tts_config_id is not UNSET: + update_data['tts_config_id'] = payload.tts_config_id + if payload.stt_config_id is not UNSET: + update_data['stt_config_id'] = payload.stt_config_id + if payload.telephony_config_id is not UNSET: + update_data['telephony_config_id'] = payload.telephony_config_id + if payload.system_prompt is not UNSET: + update_data['system_prompt'] = payload.system_prompt + if payload.welcome_message is not UNSET: + update_data['welcome_message'] = payload.welcome_message + if payload.conversation_config is not UNSET: + update_data['conversation_config'] = ( + json.dumps(payload.conversation_config) + if payload.conversation_config + else None + ) + if payload.status is not UNSET: + if hasattr(payload.status, 'value'): + # It's an enum object + update_data['status'] = payload.status.value + elif isinstance(payload.status, str) and payload.status in [ + e.value for e in VoiceAgentStatus + ]: + # It's a valid enum value string + update_data['status'] = payload.status + else: + # Invalid value + valid_values = [e.value for e in VoiceAgentStatus] + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Invalid status value. Must be one of: {valid_values}' + ), + ) + + if not update_data: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse('No fields to update'), + ) + + updated_agent = await voice_agent_service.update_agent(agent_id, **update_data) + + if not updated_agent: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Voice agent not found with id: {agent_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Voice agent updated successfully', + 'voice_agent': updated_agent, + } + ), + ) + + +@voice_agent_router.delete('/v1/voice-agents/{agent_id}') +@inject +async def delete_voice_agent( + agent_id: UUID = Path(..., description='The ID of the voice agent'), + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + voice_agent_service: VoiceAgentService = Depends( + Provide[VoiceAgentsContainer.voice_agent_service] + ), +): + """ + Delete a voice agent (soft delete) + + Marks the voice agent as deleted (sets is_deleted=True). 
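+    The record itself is retained; it is only flagged, mirroring the
+    soft-delete behaviour of the config endpoints.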
+ + Args: + agent_id: UUID of the voice agent to delete + + Returns: + JSONResponse: Success message + """ + deleted = await voice_agent_service.delete_agent(agent_id) + + if not deleted: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Voice agent not found with id: {agent_id}' + ), + ) + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Voice agent deleted successfully', + 'voice_agent_id': str(agent_id), + } + ), + ) + + +@voice_agent_router.post('/v1/voice-agents/{agent_id}/initiate') +@inject +async def initiate_call( + agent_id: UUID = Path(..., description='The ID of the voice agent'), + payload: InitiateCallPayload = ..., + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + voice_agent_service: VoiceAgentService = Depends( + Provide[VoiceAgentsContainer.voice_agent_service] + ), + twilio_service: TwilioService = Depends( + Provide[VoiceAgentsContainer.twilio_service] + ), +): + """ + Initiate an outbound call for a voice agent + + Validates the agent, selects appropriate phone number, and initiates + a call using Twilio. + + Args: + agent_id: UUID of the voice agent + payload: Call details (to_number, optional from_number) + + Returns: + JSONResponse: Call initiation details + """ + # Fetch the voice agent + agent_dict = await voice_agent_service.get_agent(agent_id) + + if not agent_dict: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse( + f'Voice agent not found with id: {agent_id}' + ), + ) + + # Check if agent is active + if agent_dict.get('status') != 'active': + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'Voice agent must be active to initiate calls. 
Current status: {agent_dict.get("status")}' + ), + ) + + # Fetch telephony config + telephony_config_id = agent_dict.get('telephony_config_id') + telephony_config = await voice_agent_service.telephony_config_service.get_config( + telephony_config_id + ) + + if not telephony_config: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Telephony config not found with id: {telephony_config_id}' + ), + ) + + # Parse phone_numbers from telephony config + phone_numbers = telephony_config.get('phone_numbers') + if ( + not phone_numbers + or not isinstance(phone_numbers, list) + or len(phone_numbers) == 0 + ): + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'No phone numbers configured in telephony config' + ), + ) + + # Select from_number + from_number = payload.from_number + if from_number: + # Validate that provided from_number is in the configured numbers + if from_number not in phone_numbers: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + f'from_number {from_number} is not in the configured phone numbers: {phone_numbers}' + ), + ) + else: + # Default to first configured number + from_number = phone_numbers[0] + + # Extract Twilio credentials from telephony config + credentials = telephony_config.get('credentials', {}) + account_sid = credentials.get('account_sid') + auth_token = credentials.get('auth_token') + + if not account_sid or not auth_token: + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + 'Twilio credentials (account_sid, auth_token) not found in telephony config' + ), + ) + + # Generate presigned URL for welcome message audio + welcome_message_audio_url = '' + if agent_dict.get('welcome_message'): + try: + welcome_message_audio_url = ( + await voice_agent_service.get_welcome_message_audio_url(agent_id) + ) + except Exception as e: + logger.error(f'Failed to generate welcome message audio URL: {str(e)}') + # Continue with empty URL - call will proceed without welcome message + + # Initiate the call using Twilio + call_details = twilio_service.initiate_call( + to_number=payload.to_number, + from_number=from_number, + voice_agent_id=str(agent_id), + welcome_message_audio_url=welcome_message_audio_url, + account_sid=account_sid, + auth_token=auth_token, + ) + + # Build response + response_data = { + 'call_sid': call_details['call_sid'], + 'status': call_details['status'], + 'to_number': call_details['to_number'], + 'from_number': call_details['from_number'], + 'voice_agent_id': str(agent_id), + 'initiated_at': datetime.utcnow().isoformat(), + } + + return JSONResponse( + status_code=status.HTTP_201_CREATED, + content=response_formatter.buildSuccessResponse( + { + 'message': 'Call initiated successfully', + 'call': response_data, + } + ), + ) diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/models/stt_schemas.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/models/stt_schemas.py new file mode 100644 index 00000000..c74a3da4 --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/models/stt_schemas.py @@ -0,0 +1,60 @@ +from pydantic import BaseModel, Field +from typing import Optional, Union, Any, Dict +from enum import Enum +from datetime import datetime +import uuid + +# Sentinel value for partial updates +UNSET = 
object() + + +class SttProvider(str, Enum): + DEEPGRAM = 'deepgram' + ASSEMBLYAI = 'assemblyai' + WHISPER = 'whisper' + GOOGLE = 'google' + AZURE = 'azure' + + +class CreateSttConfigPayload(BaseModel): + display_name: str = Field( + ..., + min_length=1, + max_length=100, + description='Display name for the STT configuration', + ) + description: Optional[str] = Field( + None, + max_length=500, + description='Optional description of the STT configuration', + ) + provider: SttProvider = Field(..., description='STT provider') + api_key: str = Field(..., description='API key for the STT provider') + language: Optional[str] = Field( + None, + description='ISO 639-1 language code (optional, most providers auto-detect)', + ) + parameters: Optional[Dict[str, Any]] = Field( + None, description='Provider-specific parameters as JSON object (optional)' + ) + + +class UpdateSttConfigPayload(BaseModel): + display_name: Union[str, Any] = Field(default=UNSET) + description: Union[str, None, Any] = Field(default=UNSET) + provider: Union[SttProvider, Any] = Field(default=UNSET) + api_key: Union[str, Any] = Field(default=UNSET) + language: Union[str, None, Any] = Field(default=UNSET) + parameters: Union[Dict[str, Any], None, Any] = Field(default=UNSET) + + +class SttConfigResponse(BaseModel): + id: uuid.UUID + display_name: str + description: Optional[str] + provider: str + language: Optional[str] + parameters: Optional[Dict[str, Any]] + is_deleted: bool + created_at: datetime + updated_at: datetime diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/models/telephony_schemas.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/models/telephony_schemas.py new file mode 100644 index 00000000..95a45cff --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/models/telephony_schemas.py @@ -0,0 +1,124 @@ +from pydantic import BaseModel, Field, model_validator +from typing import Optional, Union, Any, Dict, List, Literal +from enum import Enum +from datetime import datetime +import uuid + +# Sentinel value for partial updates +UNSET = object() + + +class TelephonyProvider(str, Enum): + TWILIO = 'twilio' + + +class ConnectionType(str, Enum): + WEBSOCKET = 'websocket' + SIP = 'sip' + + +class WebhookConfig(BaseModel): + """ + Webhook configuration for call status updates. + + Used to receive Twilio status callbacks (ringing, answered, completed, etc.) + Optional for both websocket and SIP connection types. + """ + + status_callback_url: str = Field( + ..., + description='URL to receive call status updates (ringing, answered, completed)', + example='https://example.com/webhooks/call-status', + ) + + +class SipConfig(BaseModel): + """ + SIP connection configuration. + + Required for SIP connection type, specifies the SIP domain and optional parameters. 
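+    An illustrative value (matching the field examples below):
+        {"sip_domain": "pstn.twilio.com", "port": 5061, "transport": "tls"}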
+ """ + + sip_domain: str = Field( + ..., + description='SIP domain (e.g., pstn.twilio.com, example.sip.daily.co)', + example='pstn.twilio.com', + ) + port: Optional[int] = Field( + None, + description='SIP port (optional, provider-specific)', + ge=1, + le=65535, + example=5061, + ) + transport: Optional[Literal['udp', 'tcp', 'tls']] = Field( + None, description='SIP transport protocol (optional)', example='tls' + ) + + +class CreateTelephonyConfigPayload(BaseModel): + display_name: str = Field( + ..., + min_length=1, + max_length=100, + description='Display name for the telephony configuration', + ) + description: Optional[str] = Field( + None, + max_length=500, + description='Optional description of the telephony configuration', + ) + provider: TelephonyProvider = Field( + ..., description='Telephony provider (twilio for Phase 1)' + ) + connection_type: ConnectionType = Field( + ..., description='Connection type (websocket or sip)' + ) + credentials: Dict[str, Any] = Field( + ..., + description='Provider credentials as JSON object (e.g., {account_sid, auth_token})', + ) + phone_numbers: List[str] = Field( + ..., + description='List of phone numbers available for outbound calls', + example=['+1234567890', '+0987654321'], + ) + webhook_config: Optional[WebhookConfig] = Field( + None, + description='Webhook configuration for status callbacks (optional for both connection types)', + ) + sip_config: Optional[SipConfig] = Field( + None, description='SIP configuration (required for SIP connection type)' + ) + + @model_validator(mode='after') + def validate_connection_type_requirements(self): + """Validate connection type specific requirements""" + if self.connection_type == ConnectionType.SIP and not self.sip_config: + raise ValueError('sip_config is required for SIP connection type') + return self + + +class UpdateTelephonyConfigPayload(BaseModel): + display_name: Union[str, Any] = Field(default=UNSET) + description: Union[str, None, Any] = Field(default=UNSET) + provider: Union[TelephonyProvider, Any] = Field(default=UNSET) + connection_type: Union[ConnectionType, Any] = Field(default=UNSET) + credentials: Union[Dict[str, Any], Any] = Field(default=UNSET) + phone_numbers: Union[List[str], Any] = Field(default=UNSET) + webhook_config: Union[WebhookConfig, None, Any] = Field(default=UNSET) + sip_config: Union[SipConfig, None, Any] = Field(default=UNSET) + + +class TelephonyConfigResponse(BaseModel): + id: uuid.UUID + display_name: str + description: Optional[str] + provider: str + connection_type: str + phone_numbers: List[str] + webhook_config: Optional[WebhookConfig] + sip_config: Optional[SipConfig] + is_deleted: bool + created_at: datetime + updated_at: datetime diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/models/tts_schemas.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/models/tts_schemas.py new file mode 100644 index 00000000..090c2ccf --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/models/tts_schemas.py @@ -0,0 +1,63 @@ +from pydantic import BaseModel, Field +from typing import Optional, Union, Any, Dict +from enum import Enum +from datetime import datetime +import uuid + +# Sentinel value for partial updates +UNSET = object() + + +class TtsProvider(str, Enum): + ELEVENLABS = 'elevenlabs' + DEEPGRAM = 'deepgram' + CARTESIA = 'cartesia' + AZURE = 'azure' + GOOGLE = 'google' + AWS = 'aws' + + +class CreateTtsConfigPayload(BaseModel): + display_name: str = Field( + ..., + min_length=1, + 
max_length=100, + description='Display name for the TTS configuration', + ) + description: Optional[str] = Field( + None, + max_length=500, + description='Optional description of the TTS configuration', + ) + provider: TtsProvider = Field(..., description='TTS provider') + voice_id: str = Field(..., description='Provider-specific voice identifier') + api_key: str = Field(..., description='API key for the TTS provider') + language: Optional[str] = Field( + None, description='ISO 639-1 language code (optional, for multi-lingual voices)' + ) + parameters: Optional[Dict[str, Any]] = Field( + None, description='Provider-specific parameters as JSON object (optional)' + ) + + +class UpdateTtsConfigPayload(BaseModel): + display_name: Union[str, Any] = Field(default=UNSET) + description: Union[str, None, Any] = Field(default=UNSET) + provider: Union[TtsProvider, Any] = Field(default=UNSET) + voice_id: Union[str, Any] = Field(default=UNSET) + api_key: Union[str, Any] = Field(default=UNSET) + language: Union[str, None, Any] = Field(default=UNSET) + parameters: Union[Dict[str, Any], None, Any] = Field(default=UNSET) + + +class TtsConfigResponse(BaseModel): + id: uuid.UUID + display_name: str + description: Optional[str] + provider: str + voice_id: str + language: Optional[str] + parameters: Optional[Dict[str, Any]] + is_deleted: bool + created_at: datetime + updated_at: datetime diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/models/voice_agent_schemas.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/models/voice_agent_schemas.py new file mode 100644 index 00000000..8499b08b --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/models/voice_agent_schemas.py @@ -0,0 +1,74 @@ +from pydantic import BaseModel, Field +from typing import Optional, Union, Any, Dict +from enum import Enum +from datetime import datetime +import uuid + +# Sentinel value for partial updates +UNSET = object() + + +class VoiceAgentStatus(str, Enum): + ACTIVE = 'active' + INACTIVE = 'inactive' + + +class CreateVoiceAgentPayload(BaseModel): + name: str = Field(..., description='Name of the voice agent') + description: Optional[str] = Field( + None, description='Description of the voice agent' + ) + llm_config_id: uuid.UUID = Field(..., description='LLM inference config ID') + tts_config_id: uuid.UUID = Field(..., description='TTS config ID') + stt_config_id: uuid.UUID = Field(..., description='STT config ID') + telephony_config_id: uuid.UUID = Field(..., description='Telephony config ID') + system_prompt: str = Field(..., description='System prompt for the LLM') + conversation_config: Optional[Dict[str, Any]] = Field( + None, description='Conversation configuration settings (optional)' + ) + welcome_message: str = Field( + ..., + description='Welcome message to play at call start (will be converted to audio)', + ) + status: VoiceAgentStatus = Field( + default=VoiceAgentStatus.INACTIVE, + description='Agent status (active or inactive)', + ) + + +class UpdateVoiceAgentPayload(BaseModel): + name: Union[str, Any] = Field(default=UNSET) + description: Union[str, None, Any] = Field(default=UNSET) + llm_config_id: Union[uuid.UUID, Any] = Field(default=UNSET) + tts_config_id: Union[uuid.UUID, Any] = Field(default=UNSET) + stt_config_id: Union[uuid.UUID, Any] = Field(default=UNSET) + telephony_config_id: Union[uuid.UUID, Any] = Field(default=UNSET) + system_prompt: Union[str, Any] = Field(default=UNSET) + conversation_config: Union[Dict[str, Any], None, Any] = 
Field(default=UNSET) + welcome_message: Union[str, Any] = Field(default=UNSET) + status: Union[VoiceAgentStatus, Any] = Field(default=UNSET) + + +class VoiceAgentResponse(BaseModel): + id: uuid.UUID + name: str + description: Optional[str] + llm_config_id: uuid.UUID + tts_config_id: uuid.UUID + stt_config_id: uuid.UUID + telephony_config_id: uuid.UUID + system_prompt: str + conversation_config: Optional[Dict[str, Any]] + welcome_message: str + status: str + is_deleted: bool + created_at: datetime + updated_at: datetime + + +class InitiateCallPayload(BaseModel): + to_number: str = Field(..., description='Destination phone number (E.164 format)') + from_number: Optional[str] = Field( + None, + description='Source phone number (optional, defaults to first configured number)', + ) diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/services/stt_config_service.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/services/stt_config_service.py new file mode 100644 index 00000000..c9c452b0 --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/services/stt_config_service.py @@ -0,0 +1,222 @@ +import json +from typing import List, Optional +from uuid import UUID + +from common_module.log.logger import logger +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.models.stt_config import SttConfig +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from voice_agents_module.utils.cache_utils import ( + get_stt_config_cache_key, + get_stt_configs_list_cache_key, +) +from voice_agents_module.utils.cache_invalidation import ( + invalidate_call_processing_cache, +) + + +class SttConfigService: + """Service for handling STT configuration CRUD operations with caching""" + + def __init__( + self, + stt_config_repository: SQLAlchemyRepository[SttConfig], + cache_manager: CacheManager, + ): + """ + Initialize the STT config service + + Args: + stt_config_repository: Repository for STT configs + cache_manager: Cache manager instance + """ + self.stt_config_repository = stt_config_repository + self.cache_manager = cache_manager + self.stt_config_cache_time = 3600 * 24 + + async def create_config( + self, + display_name: str, + description: Optional[str] = None, + provider: str = None, + api_key: str = None, + language: Optional[str] = None, + parameters: Optional[dict] = None, + ) -> dict: + """ + Create a new STT configuration + + Args: + display_name: Display name for the configuration + description: Optional description + provider: STT provider + api_key: API key for the STT provider + language: ISO 639-1 language code (optional) + parameters: Provider-specific parameters (optional) + + Returns: + Created STT config as dict + """ + logger.info( + f'Creating STT config - display_name: {display_name}, provider: {provider}' + ) + + config = await self.stt_config_repository.create( + display_name=display_name, + description=description, + provider=provider, + api_key=api_key, + language=language, + parameters=json.dumps(parameters) if parameters else None, + ) + + # Convert to dict + config_dict = config.to_dict(exclude_api_key=False) + + # Cache the config + cache_key = get_stt_config_cache_key(config.id) + self.cache_manager.add( + cache_key, json.dumps(config_dict), expiry=self.stt_config_cache_time + ) + + # Invalidate list cache + list_cache_key = get_stt_configs_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await 
invalidate_call_processing_cache('stt_config', config.id, 'create') + + logger.info(f'Successfully created STT config with id: {config.id}') + return config_dict + + async def get_config(self, config_id: UUID) -> Optional[dict]: + """ + Get an STT configuration by ID (with caching) + + Args: + config_id: UUID of the configuration + + Returns: + STT config as dict or None if not found + """ + cache_key = get_stt_config_cache_key(config_id) + + # Try cache first + cached_config_str = self.cache_manager.get_str(cache_key) + if cached_config_str: + logger.info(f'Cache hit for STT config: {config_id}') + return json.loads(cached_config_str) + + # Cache miss - fetch from DB + logger.info(f'Cache miss - fetching STT config from DB: {config_id}') + config = await self.stt_config_repository.find_one( + id=config_id, is_deleted=False + ) + + if config: + # Convert to dict and cache + config_dict = config.to_dict(exclude_api_key=False) + self.cache_manager.add( + cache_key, json.dumps(config_dict), expiry=self.stt_config_cache_time + ) + return config_dict + + return None + + async def list_configs(self) -> List[dict]: + """ + List all STT configurations (with caching) + + Returns: + List of STT configs as dicts + """ + list_cache_key = get_stt_configs_list_cache_key() + + # Try cache first + cached_list_str = self.cache_manager.get_str(list_cache_key) + if cached_list_str: + logger.info('Cache hit for STT configs list') + return json.loads(cached_list_str) + + # Cache miss - fetch from DB + logger.info('Cache miss - fetching STT configs list from DB') + configs = await self.stt_config_repository.find(is_deleted=False) + + # Convert to dicts and cache + configs_dicts = [config.to_dict(exclude_api_key=False) for config in configs] + self.cache_manager.add( + list_cache_key, json.dumps(configs_dicts), expiry=self.stt_config_cache_time + ) + + return configs_dicts + + async def update_config(self, config_id: UUID, **update_data) -> Optional[dict]: + """ + Update an STT configuration + + Args: + config_id: UUID of the configuration + **update_data: Fields to update + + Returns: + Updated config as dict or None if not found + """ + logger.info(f'Updating STT config: {config_id}') + + existing_config = await self.stt_config_repository.find_one( + id=config_id, is_deleted=False + ) + if not existing_config: + return None + + updated_config = await self.stt_config_repository.find_one_and_update( + {'id': config_id}, refresh=True, **update_data + ) + + # Invalidate caches + cache_key = get_stt_config_cache_key(config_id) + self.cache_manager.remove(cache_key) + + list_cache_key = get_stt_configs_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache('stt_config', config_id, 'update') + + logger.info(f'Successfully updated STT config: {config_id}') + return updated_config.to_dict(exclude_api_key=False) + + async def delete_config(self, config_id: UUID) -> bool: + """ + Delete an STT configuration (soft delete) + + Args: + config_id: UUID of the configuration + + Returns: + True if deleted, False if not found + """ + logger.info(f'Deleting STT config: {config_id}') + + existing_config = await self.stt_config_repository.find_one( + id=config_id, is_deleted=False + ) + if not existing_config: + return False + + await self.stt_config_repository.find_one_and_update( + {'id': config_id}, is_deleted=True + ) + + # Invalidate caches + cache_key = get_stt_config_cache_key(config_id) + self.cache_manager.remove(cache_key) + + 
list_cache_key = get_stt_configs_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache('stt_config', config_id, 'delete') + + logger.info(f'Successfully deleted STT config: {config_id}') + return True diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/services/telephony_config_service.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/services/telephony_config_service.py new file mode 100644 index 00000000..7d133fac --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/services/telephony_config_service.py @@ -0,0 +1,253 @@ +import json +from typing import List, Optional +from uuid import UUID + +from common_module.log.logger import logger +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.models.telephony_config import TelephonyConfig +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from voice_agents_module.models.telephony_schemas import WebhookConfig, SipConfig +from voice_agents_module.utils.cache_utils import ( + get_telephony_config_cache_key, + get_telephony_configs_list_cache_key, +) +from voice_agents_module.utils.cache_invalidation import ( + invalidate_call_processing_cache, +) + + +class TelephonyConfigService: + """Service for handling telephony configuration CRUD operations with caching""" + + def __init__( + self, + telephony_config_repository: SQLAlchemyRepository[TelephonyConfig], + cache_manager: CacheManager, + ): + """ + Initialize the telephony config service + + Args: + telephony_config_repository: Repository for telephony configs + cache_manager: Cache manager instance + """ + self.telephony_config_repository = telephony_config_repository + self.cache_manager = cache_manager + self.telephony_config_cache_time = 3600 * 24 + + async def create_config( + self, + display_name: str, + description: Optional[str] = None, + provider: str = None, + connection_type: str = None, + credentials: dict = None, + phone_numbers: list = None, + webhook_config: Optional[WebhookConfig] = None, + sip_config: Optional[SipConfig] = None, + ) -> dict: + """ + Create a new telephony configuration + + Args: + display_name: Display name for the configuration + description: Optional description + provider: Telephony provider + connection_type: Connection type (websocket/sip) + credentials: Provider credentials + phone_numbers: List of phone numbers available for outbound calls + webhook_config: Webhook configuration Pydantic model (optional) + sip_config: SIP configuration Pydantic model (optional) + + Returns: + Created telephony config as dict + """ + logger.info( + f'Creating telephony config - display_name: {display_name}, provider: {provider}, connection_type: {connection_type}' + ) + + config = await self.telephony_config_repository.create( + display_name=display_name, + description=description, + provider=provider, + connection_type=connection_type, + credentials=json.dumps(credentials), + phone_numbers=json.dumps(phone_numbers), + webhook_config=( + json.dumps(webhook_config.model_dump()) if webhook_config else None + ), + sip_config=json.dumps(sip_config.model_dump()) if sip_config else None, + ) + + # Convert to dict + config_dict = config.to_dict(exclude_credentials=False) + + # Cache the config + cache_key = get_telephony_config_cache_key(config.id) + self.cache_manager.add( + cache_key, json.dumps(config_dict), expiry=self.telephony_config_cache_time + 
) + + # Invalidate list cache + list_cache_key = get_telephony_configs_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache('telephony_config', config.id, 'create') + + logger.info(f'Successfully created telephony config with id: {config.id}') + return config_dict + + async def get_config(self, config_id: UUID) -> Optional[dict]: + """ + Get a telephony configuration by ID (with caching) + + Args: + config_id: UUID of the configuration + + Returns: + Telephony config as dict or None if not found + """ + cache_key = get_telephony_config_cache_key(config_id) + + # Try cache first + cached_config_str = self.cache_manager.get_str(cache_key) + if cached_config_str: + logger.info(f'Cache hit for telephony config: {config_id}') + return json.loads(cached_config_str) + + # Cache miss - fetch from DB + logger.info(f'Cache miss - fetching telephony config from DB: {config_id}') + config = await self.telephony_config_repository.find_one( + id=config_id, is_deleted=False + ) + + if config: + # Convert to dict and cache + config_dict = config.to_dict(exclude_credentials=False) + self.cache_manager.add( + cache_key, + json.dumps(config_dict), + expiry=self.telephony_config_cache_time, + ) + return config_dict + + return None + + async def list_configs(self) -> List[dict]: + """ + List all telephony configurations (with caching) + + Returns: + List of telephony configs as dicts + """ + list_cache_key = get_telephony_configs_list_cache_key() + + # Try cache first + cached_list_str = self.cache_manager.get_str(list_cache_key) + if cached_list_str: + logger.info('Cache hit for telephony configs list') + return json.loads(cached_list_str) + + # Cache miss - fetch from DB + logger.info('Cache miss - fetching telephony configs list from DB') + configs = await self.telephony_config_repository.find(is_deleted=False) + + # Convert to dicts and cache + configs_dicts = [ + config.to_dict(exclude_credentials=False) for config in configs + ] + self.cache_manager.add( + list_cache_key, + json.dumps(configs_dicts), + expiry=self.telephony_config_cache_time, + ) + + return configs_dicts + + async def update_config(self, config_id: UUID, **update_data) -> Optional[dict]: + """ + Update a telephony configuration + + Args: + config_id: UUID of the configuration + **update_data: Fields to update + + Returns: + Updated config as dict or None if not found + + Raises: + ValueError: If validation fails (e.g., SIP connection without sip_config) + """ + logger.info(f'Updating telephony config: {config_id}') + + existing_config = await self.telephony_config_repository.find_one( + id=config_id, is_deleted=False + ) + if not existing_config: + return None + + # Validate connection type requirements after merge + final_connection_type = update_data.get( + 'connection_type', existing_config.connection_type + ) + final_sip_config = update_data.get('sip_config', existing_config.sip_config) + + # If final state is SIP connection, ensure sip_config exists + if final_connection_type == 'sip' and not final_sip_config: + raise ValueError( + 'sip_config is required for SIP connection type. ' + 'Provide sip_config or change connection_type.' 
+ ) + + updated_config = await self.telephony_config_repository.find_one_and_update( + {'id': config_id}, refresh=True, **update_data + ) + + # Invalidate caches + cache_key = get_telephony_config_cache_key(config_id) + self.cache_manager.remove(cache_key) + + list_cache_key = get_telephony_configs_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache('telephony_config', config_id, 'update') + + logger.info(f'Successfully updated telephony config: {config_id}') + return updated_config.to_dict(exclude_credentials=False) + + async def delete_config(self, config_id: UUID) -> bool: + """ + Delete a telephony configuration (soft delete) + + Args: + config_id: UUID of the configuration + + Returns: + True if deleted, False if not found + """ + logger.info(f'Deleting telephony config: {config_id}') + + existing_config = await self.telephony_config_repository.find_one( + id=config_id, is_deleted=False + ) + if not existing_config: + return False + + await self.telephony_config_repository.find_one_and_update( + {'id': config_id}, is_deleted=True + ) + + # Invalidate caches + cache_key = get_telephony_config_cache_key(config_id) + self.cache_manager.remove(cache_key) + + list_cache_key = get_telephony_configs_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache('telephony_config', config_id, 'delete') + + logger.info(f'Successfully deleted telephony config: {config_id}') + return True diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/services/tts_config_service.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/services/tts_config_service.py new file mode 100644 index 00000000..2678bf82 --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/services/tts_config_service.py @@ -0,0 +1,225 @@ +import json +from typing import List, Optional +from uuid import UUID + +from common_module.log.logger import logger +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.models.tts_config import TtsConfig +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from voice_agents_module.utils.cache_utils import ( + get_tts_config_cache_key, + get_tts_configs_list_cache_key, +) +from voice_agents_module.utils.cache_invalidation import ( + invalidate_call_processing_cache, +) + + +class TtsConfigService: + """Service for handling TTS configuration CRUD operations with caching""" + + def __init__( + self, + tts_config_repository: SQLAlchemyRepository[TtsConfig], + cache_manager: CacheManager, + ): + """ + Initialize the TTS config service + + Args: + tts_config_repository: Repository for TTS configs + cache_manager: Cache manager instance + """ + self.tts_config_repository = tts_config_repository + self.cache_manager = cache_manager + self.tts_config_cache_time = 3600 * 24 + + async def create_config( + self, + display_name: str, + description: Optional[str] = None, + provider: str = None, + voice_id: str = None, + api_key: str = None, + language: Optional[str] = None, + parameters: Optional[dict] = None, + ) -> dict: + """ + Create a new TTS configuration + + Args: + display_name: Display name for the configuration + description: Optional description + provider: TTS provider + voice_id: Provider-specific voice identifier + api_key: API key for the TTS provider + language: ISO 639-1 language code (optional) 
+ parameters: Provider-specific parameters (optional) + + Returns: + Created TTS config as dict + """ + logger.info( + f'Creating TTS config - display_name: {display_name}, provider: {provider}, voice_id: {voice_id}' + ) + + config = await self.tts_config_repository.create( + display_name=display_name, + description=description, + provider=provider, + voice_id=voice_id, + api_key=api_key, + language=language, + parameters=json.dumps(parameters) if parameters else None, + ) + + # Convert to dict + config_dict = config.to_dict(exclude_api_key=False) + + # Cache the config + cache_key = get_tts_config_cache_key(config.id) + self.cache_manager.add( + cache_key, json.dumps(config_dict), expiry=self.tts_config_cache_time + ) + + # Invalidate list cache + list_cache_key = get_tts_configs_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache('tts_config', config.id, 'create') + + logger.info(f'Successfully created TTS config with id: {config.id}') + return config_dict + + async def get_config(self, config_id: UUID) -> Optional[dict]: + """ + Get a TTS configuration by ID (with caching) + + Args: + config_id: UUID of the configuration + + Returns: + TTS config as dict or None if not found + """ + cache_key = get_tts_config_cache_key(config_id) + + # Try cache first + cached_config_str = self.cache_manager.get_str(cache_key) + if cached_config_str: + logger.info(f'Cache hit for TTS config: {config_id}') + return json.loads(cached_config_str) + + # Cache miss - fetch from DB + logger.info(f'Cache miss - fetching TTS config from DB: {config_id}') + config = await self.tts_config_repository.find_one( + id=config_id, is_deleted=False + ) + + if config: + # Convert to dict and cache + config_dict = config.to_dict(exclude_api_key=False) + self.cache_manager.add( + cache_key, json.dumps(config_dict), expiry=self.tts_config_cache_time + ) + return config_dict + + return None + + async def list_configs(self) -> List[dict]: + """ + List all TTS configurations (with caching) + + Returns: + List of TTS configs as dicts + """ + list_cache_key = get_tts_configs_list_cache_key() + + # Try cache first + cached_list_str = self.cache_manager.get_str(list_cache_key) + if cached_list_str: + logger.info('Cache hit for TTS configs list') + return json.loads(cached_list_str) + + # Cache miss - fetch from DB + logger.info('Cache miss - fetching TTS configs list from DB') + configs = await self.tts_config_repository.find(is_deleted=False) + + # Convert to dicts and cache + configs_dicts = [config.to_dict(exclude_api_key=False) for config in configs] + self.cache_manager.add( + list_cache_key, json.dumps(configs_dicts), expiry=self.tts_config_cache_time + ) + + return configs_dicts + + async def update_config(self, config_id: UUID, **update_data) -> Optional[dict]: + """ + Update a TTS configuration + + Args: + config_id: UUID of the configuration + **update_data: Fields to update + + Returns: + Updated config as dict or None if not found + """ + logger.info(f'Updating TTS config: {config_id}') + + existing_config = await self.tts_config_repository.find_one( + id=config_id, is_deleted=False + ) + if not existing_config: + return None + + updated_config = await self.tts_config_repository.find_one_and_update( + {'id': config_id}, refresh=True, **update_data + ) + + # Invalidate caches + cache_key = get_tts_config_cache_key(config_id) + self.cache_manager.remove(cache_key) + + list_cache_key = get_tts_configs_list_cache_key() + 
self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache('tts_config', config_id, 'update') + + logger.info(f'Successfully updated TTS config: {config_id}') + return updated_config.to_dict(exclude_api_key=False) + + async def delete_config(self, config_id: UUID) -> bool: + """ + Delete a TTS configuration (soft delete) + + Args: + config_id: UUID of the configuration + + Returns: + True if deleted, False if not found + """ + logger.info(f'Deleting TTS config: {config_id}') + + existing_config = await self.tts_config_repository.find_one( + id=config_id, is_deleted=False + ) + if not existing_config: + return False + + await self.tts_config_repository.find_one_and_update( + {'id': config_id}, is_deleted=True + ) + + # Invalidate caches + cache_key = get_tts_config_cache_key(config_id) + self.cache_manager.remove(cache_key) + + list_cache_key = get_tts_configs_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache('tts_config', config_id, 'delete') + + logger.info(f'Successfully deleted TTS config: {config_id}') + return True diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/services/tts_generator_service.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/services/tts_generator_service.py new file mode 100644 index 00000000..445d60eb --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/services/tts_generator_service.py @@ -0,0 +1,228 @@ +""" +TTS Generator Service + +Generates audio from text using various TTS providers. +This service is used to pre-generate welcome message audio files. +""" + +import httpx +from typing import Dict, Any +from common_module.log.logger import logger + + +class TTSGeneratorService: + """Service to generate audio from text using TTS providers""" + + def __init__(self): + self.timeout = 30.0 # 30 seconds timeout for API calls + + async def generate_audio(self, text: str, tts_config: Dict[str, Any]) -> bytes: + """ + Generate audio from text using the specified TTS configuration. + + Args: + text: Text to convert to speech + tts_config: TTS configuration dict with provider, api_key, voice_id, parameters + + Returns: + bytes: Audio data in MP3 format + + Raises: + ValueError: If provider is not supported + Exception: If TTS generation fails + """ + provider = tts_config.get('provider') + api_key = tts_config.get('api_key') + voice_id = tts_config.get('voice_id') + parameters = tts_config.get('parameters', {}) or {} + + logger.info(f'Generating audio with {provider} for voice {voice_id}') + + if provider == 'elevenlabs': + return await self._generate_elevenlabs(text, api_key, voice_id, parameters) + elif provider == 'deepgram': + return await self._generate_deepgram(text, api_key, voice_id, parameters) + elif provider == 'cartesia': + return await self._generate_cartesia(text, api_key, voice_id, parameters) + else: + raise ValueError( + f'Unsupported TTS provider for audio generation: {provider}' + ) + + async def _generate_elevenlabs( + self, text: str, api_key: str, voice_id: str, parameters: Dict[str, Any] + ) -> bytes: + """ + Generate audio using ElevenLabs API. 
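+
+        Sends a single POST per call; ElevenLabs returns the raw audio bytes
+        (MP3 by default) in the response body. Optional voice settings
+        (stability, speed, etc.) are only included when present in `parameters`.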
+ + API Docs: https://elevenlabs.io/docs/api-reference/text-to-speech + """ + url = f'https://api.elevenlabs.io/v1/text-to-speech/{voice_id}' + + headers = { + 'xi-api-key': api_key, + 'Content-Type': 'application/json', + } + + # Build request body with voice settings + body = { + 'text': text, + 'model_id': parameters.get('model', 'eleven_multilingual_v2'), + } + + # Add voice settings if specified + voice_settings = {} + if 'stability' in parameters: + voice_settings['stability'] = parameters['stability'] + if 'similarity_boost' in parameters: + voice_settings['similarity_boost'] = parameters['similarity_boost'] + if 'style' in parameters: + voice_settings['style'] = parameters['style'] + if 'use_speaker_boost' in parameters: + voice_settings['use_speaker_boost'] = parameters['use_speaker_boost'] + if 'speed' in parameters: + voice_settings['speed'] = parameters['speed'] + + if voice_settings: + body['voice_settings'] = voice_settings + + try: + async with httpx.AsyncClient(timeout=self.timeout) as client: + response = await client.post(url, headers=headers, json=body) + response.raise_for_status() + + # ElevenLabs returns audio directly + logger.info( + f'ElevenLabs audio generated successfully, size: {len(response.content)} bytes' + ) + return response.content + + except httpx.HTTPStatusError as e: + logger.error( + f'ElevenLabs API error: {e.response.status_code} - {e.response.text}' + ) + raise Exception(f'ElevenLabs TTS generation failed: {e.response.text}') + except Exception as e: + logger.error(f'ElevenLabs request failed: {str(e)}') + raise Exception(f'ElevenLabs TTS generation failed: {str(e)}') + + async def _generate_deepgram( + self, text: str, api_key: str, voice_id: str, parameters: Dict[str, Any] + ) -> bytes: + """ + Generate audio using Deepgram API. + + API Docs: https://developers.deepgram.com/docs/text-to-speech + """ + base_url = parameters.get('base_url', 'https://api.deepgram.com') + url = f'{base_url}/v1/speak' + + headers = { + 'Authorization': f'Token {api_key}', + 'Content-Type': 'application/json', + } + + # Build query parameters + params = { + 'model': voice_id, # voice_id is the model (e.g., "aura-2-helena-en") + } + + if 'encoding' in parameters: + params['encoding'] = parameters['encoding'] + else: + params['encoding'] = 'mp3' # Default to mp3 + + if 'sample_rate' in parameters: + params['sample_rate'] = parameters['sample_rate'] + + body = {'text': text} + + try: + async with httpx.AsyncClient(timeout=self.timeout) as client: + response = await client.post( + url, headers=headers, params=params, json=body + ) + response.raise_for_status() + + logger.info( + f'Deepgram audio generated successfully, size: {len(response.content)} bytes' + ) + return response.content + + except httpx.HTTPStatusError as e: + logger.error( + f'Deepgram API error: {e.response.status_code} - {e.response.text}' + ) + raise Exception(f'Deepgram TTS generation failed: {e.response.text}') + except Exception as e: + logger.error(f'Deepgram request failed: {str(e)}') + raise Exception(f'Deepgram TTS generation failed: {str(e)}') + + async def _generate_cartesia( + self, text: str, api_key: str, voice_id: str, parameters: Dict[str, Any] + ) -> bytes: + """ + Generate audio using Cartesia API. 
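+
+        Output is requested as MP3; sample rate and bit rate fall back to
+        44100 Hz and 128 kbps unless overridden via `parameters`.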
+ + API Docs: https://docs.cartesia.ai/api-reference/tts/bytes + """ + url = 'https://api.cartesia.ai/tts/bytes' + + headers = { + 'Authorization': f'Bearer {api_key}', + 'Cartesia-Version': '2025-04-16', + 'Content-Type': 'application/json', + } + + # Build request body + body = { + 'model_id': parameters.get('model', 'sonic-3'), + 'transcript': text, + 'voice': { + 'mode': 'id', + 'id': voice_id, + }, + 'output_format': { + 'container': 'mp3', + 'sample_rate': parameters.get( + 'sample_rate', 44100 + ), # 8000, 16000, 22050, 24000, 44100, 48000 + 'bit_rate': parameters.get( + 'bit_rate', 128000 + ), # 32000, 64000, 96000, 128000, 192000 + }, + } + + # Add language (default to 'en') + body['language'] = parameters.get('language', 'en') + + # Build generation_config if any parameters are specified + generation_config = {} + if 'volume' in parameters: + generation_config['volume'] = parameters['volume'] + if 'speed' in parameters: + generation_config['speed'] = parameters['speed'] + if 'emotion' in parameters: + generation_config['emotion'] = parameters['emotion'] + + if generation_config: + body['generation_config'] = generation_config + + try: + async with httpx.AsyncClient(timeout=self.timeout) as client: + response = await client.post(url, headers=headers, json=body) + response.raise_for_status() + + logger.info( + f'Cartesia audio generated successfully, size: {len(response.content)} bytes' + ) + return response.content + + except httpx.HTTPStatusError as e: + logger.error( + f'Cartesia API error: {e.response.status_code} - {e.response.text}' + ) + raise Exception(f'Cartesia TTS generation failed: {e.response.text}') + except Exception as e: + logger.error(f'Cartesia request failed: {str(e)}') + raise Exception(f'Cartesia TTS generation failed: {str(e)}') diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/services/twilio_service.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/services/twilio_service.py new file mode 100644 index 00000000..4ea32657 --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/services/twilio_service.py @@ -0,0 +1,67 @@ +from twilio.rest import Client as TwilioClient +from common_module.log.logger import logger +from urllib.parse import quote + + +class TwilioService: + def __init__(self, call_processing_base_url: str): + self.call_processing_base_url = call_processing_base_url + + if not self.call_processing_base_url: + raise ValueError( + 'call_processing_base_url is required in voice_agents config' + ) + + def initiate_call( + self, + to_number: str, + from_number: str, + voice_agent_id: str, + welcome_message_audio_url: str, + account_sid: str, + auth_token: str, + ) -> dict: + """ + Initiates an outbound call using Twilio + + Args: + to_number: Destination phone number + from_number: Source phone number (must be a Twilio number) + voice_agent_id: ID of the voice agent + welcome_message_audio_url: URL of the welcome message audio file + account_sid: Twilio account SID + auth_token: Twilio auth token + + Returns: + dict: Call details including call_sid and status + """ + try: + # Create Twilio client + client = TwilioClient(account_sid, auth_token) + + # Build TwiML URL that Twilio will call + # URL-encode the presigned URL to safely pass as query parameter (FastAPI will decode it) + encoded_audio_url = quote(welcome_message_audio_url, safe='') + twiml_url = 
f'{self.call_processing_base_url}/webhooks/twiml?voice_agent_id={voice_agent_id}&welcome_message_audio_url={encoded_audio_url}' + + logger.info( + f'Initiating call from {from_number} to {to_number} for agent {voice_agent_id}' + ) + + # Create the call + call = client.calls.create( + to=to_number, from_=from_number, url=twiml_url, method='POST' + ) + + logger.info(f'Call created successfully. Call SID: {call.sid}') + + return { + 'call_sid': call.sid, + 'status': call.status, + 'to_number': to_number, + 'from_number': from_number, + } + + except Exception as e: + logger.error(f'Failed to initiate call: {str(e)}') + raise ValueError(f'Failed to initiate call with Twilio: {str(e)}') diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/services/voice_agent_service.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/services/voice_agent_service.py new file mode 100644 index 00000000..7cbd32e4 --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/services/voice_agent_service.py @@ -0,0 +1,490 @@ +import json +import uuid +from typing import List, Optional +from uuid import UUID + +from common_module.log.logger import logger +from db_repo_module.cache.cache_manager import CacheManager +from db_repo_module.models.voice_agent import VoiceAgent +from db_repo_module.models.llm_inference_config import LlmInferenceConfig +from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository +from flo_cloud.cloud_storage import CloudStorageManager +from voice_agents_module.services.telephony_config_service import ( + TelephonyConfigService, +) +from voice_agents_module.services.tts_config_service import TtsConfigService +from voice_agents_module.services.stt_config_service import SttConfigService +from voice_agents_module.services.tts_generator_service import TTSGeneratorService +from voice_agents_module.utils.cache_utils import ( + get_voice_agent_cache_key, + get_voice_agents_list_cache_key, + get_welcome_message_url_cache_key, +) +from voice_agents_module.utils.cache_invalidation import ( + invalidate_call_processing_cache, +) +from voice_agents_module.utils.storage_utils import generate_welcome_message_key + + +class VoiceAgentService: + """Service for handling voice agent CRUD operations with caching""" + + def __init__( + self, + voice_agent_repository: SQLAlchemyRepository[VoiceAgent], + telephony_config_service: TelephonyConfigService, + tts_config_service: TtsConfigService, + stt_config_service: SttConfigService, + llm_config_repository: SQLAlchemyRepository[LlmInferenceConfig], + cache_manager: CacheManager, + tts_generator_service: TTSGeneratorService, + cloud_storage_manager: CloudStorageManager, + voice_agent_bucket: str, + ): + """ + Initialize the voice agent service + + Args: + voice_agent_repository: Repository for voice agents + telephony_config_service: Service for telephony configs + tts_config_service: Service for TTS configs + stt_config_service: Service for STT configs + llm_config_repository: Repository for LLM inference configs + cache_manager: Cache manager instance + tts_generator_service: Service for generating TTS audio + cloud_storage_manager: Cloud storage manager for uploading audio + voice_agent_bucket: Bucket name for storing voice agent audio files + """ + self.voice_agent_repository = voice_agent_repository + self.telephony_config_service = telephony_config_service + self.tts_config_service = tts_config_service + self.stt_config_service = stt_config_service + 
self.llm_config_repository = llm_config_repository + self.cache_manager = cache_manager + self.tts_generator_service = tts_generator_service + self.cloud_storage_manager = cloud_storage_manager + self.voice_agent_bucket = voice_agent_bucket + self.voice_agent_cache_time = 3600 * 24 + + async def _validate_foreign_keys( + self, + llm_config_id: UUID, + tts_config_id: UUID, + stt_config_id: UUID, + telephony_config_id: UUID, + ) -> tuple[bool, Optional[str]]: + """ + Validate that all foreign key IDs exist and are not deleted + + Args: + llm_config_id: LLM config ID + tts_config_id: TTS config ID + stt_config_id: STT config ID + telephony_config_id: Telephony config ID + + Returns: + Tuple of (is_valid, error_message) + """ + # Validate LLM config + llm_config = await self.llm_config_repository.find_one( + id=llm_config_id, is_deleted=False + ) + if not llm_config: + return False, f'LLM config with ID {llm_config_id} not found or deleted' + + # Validate TTS config + tts_config = await self.tts_config_service.get_config(tts_config_id) + if not tts_config: + return False, f'TTS config with ID {tts_config_id} not found or deleted' + + # Validate STT config + stt_config = await self.stt_config_service.get_config(stt_config_id) + if not stt_config: + return False, f'STT config with ID {stt_config_id} not found or deleted' + + # Validate Telephony config + telephony_config = await self.telephony_config_service.get_config( + telephony_config_id + ) + if not telephony_config: + return ( + False, + f'Telephony config with ID {telephony_config_id} not found or deleted', + ) + + return True, None + + async def _generate_and_upload_welcome_audio( + self, welcome_message: str, tts_config_id: UUID, agent_id: UUID + ) -> None: + """ + Generate TTS audio for welcome message and upload to cloud storage + + Args: + welcome_message: Text of the welcome message + tts_config_id: TTS config ID to use for generation + agent_id: Voice agent ID (used for generating storage key) + + Raises: + Exception: If TTS generation or upload fails + """ + logger.info(f'Generating welcome message audio for agent {agent_id}') + + # Fetch TTS config + tts_config = await self.tts_config_service.get_config(tts_config_id) + if not tts_config: + raise ValueError(f'TTS config {tts_config_id} not found') + + # Generate audio using TTS service + try: + audio_bytes = await self.tts_generator_service.generate_audio( + welcome_message, tts_config + ) + logger.info(f'Generated audio: {len(audio_bytes)} bytes') + except Exception as e: + logger.error(f'Failed to generate TTS audio: {str(e)}') + raise Exception(f'TTS generation failed: {str(e)}') + + # Upload to cloud storage + try: + storage_key = generate_welcome_message_key(agent_id) + self.cloud_storage_manager.save_small_file( + audio_bytes, + self.voice_agent_bucket, + storage_key, + content_type='audio/mpeg', + ) + logger.info(f'Uploaded welcome message audio with key: {storage_key}') + except Exception as e: + logger.error(f'Failed to upload audio to cloud storage: {str(e)}') + raise Exception(f'Audio upload failed: {str(e)}') + + # Invalidate cached presigned URL since we uploaded new audio + url_cache_key = get_welcome_message_url_cache_key(agent_id) + self.cache_manager.remove(url_cache_key) + + async def create_agent( + self, + name: str, + llm_config_id: UUID, + tts_config_id: UUID, + stt_config_id: UUID, + telephony_config_id: UUID, + system_prompt: str, + welcome_message: str, + description: Optional[str] = None, + conversation_config: Optional[dict] = None, + status: str = 
'inactive', + ) -> dict: + """ + Create a new voice agent + + Args: + name: Name of the voice agent + llm_config_id: LLM config ID + tts_config_id: TTS config ID + stt_config_id: STT config ID + telephony_config_id: Telephony config ID + system_prompt: System prompt for the agent + welcome_message: Welcome message text (will be converted to audio) + description: Description of the agent (optional) + conversation_config: Conversation configuration (optional) + status: Agent status (default: inactive) + + Returns: + Created voice agent as dict + + Raises: + ValueError: If any foreign key validation fails + Exception: If TTS generation or upload fails + """ + logger.info(f'Creating voice agent: {name}') + + # Validate all foreign keys + is_valid, error_message = await self._validate_foreign_keys( + llm_config_id, tts_config_id, stt_config_id, telephony_config_id + ) + if not is_valid: + logger.error(f'FK validation failed: {error_message}') + raise ValueError(error_message) + + # Generate agent ID first + agent_id = uuid.uuid4() + + # Generate and upload welcome message audio BEFORE creating agent + # If this fails, no agent record is created + await self._generate_and_upload_welcome_audio( + welcome_message, tts_config_id, agent_id + ) + + # Create agent only if audio generation succeeded + agent = await self.voice_agent_repository.create( + id=agent_id, + name=name, + description=description, + llm_config_id=llm_config_id, + tts_config_id=tts_config_id, + stt_config_id=stt_config_id, + telephony_config_id=telephony_config_id, + system_prompt=system_prompt, + conversation_config=json.dumps(conversation_config) + if conversation_config + else None, + welcome_message=welcome_message, + status=status, + ) + + # Convert to dict + agent_dict = agent.to_dict() + + # Cache the agent + cache_key = get_voice_agent_cache_key(agent.id) + self.cache_manager.add( + cache_key, json.dumps(agent_dict), expiry=self.voice_agent_cache_time + ) + + # Invalidate list cache + list_cache_key = get_voice_agents_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache('voice_agent', agent.id, 'create') + + logger.info(f'Successfully created voice agent with id: {agent.id}') + return agent_dict + + async def get_agent(self, agent_id: UUID) -> Optional[dict]: + """ + Get a voice agent by ID (with caching) + + Args: + agent_id: UUID of the agent + + Returns: + Voice agent as dict or None if not found + """ + cache_key = get_voice_agent_cache_key(agent_id) + + # Try cache first + cached_agent_str = self.cache_manager.get_str(cache_key) + if cached_agent_str: + logger.info(f'Cache hit for voice agent: {agent_id}') + return json.loads(cached_agent_str) + + # Cache miss - fetch from DB + logger.info(f'Cache miss - fetching voice agent from DB: {agent_id}') + agent = await self.voice_agent_repository.find_one( + id=agent_id, is_deleted=False + ) + + if agent: + # Convert to dict and cache + agent_dict = agent.to_dict() + self.cache_manager.add( + cache_key, json.dumps(agent_dict), expiry=self.voice_agent_cache_time + ) + return agent_dict + + return None + + async def list_agents(self) -> List[dict]: + """ + List all voice agents (with caching) + + Returns: + List of voice agents as dicts + """ + list_cache_key = get_voice_agents_list_cache_key() + + # Try cache first + cached_list_str = self.cache_manager.get_str(list_cache_key) + if cached_list_str: + logger.info('Cache hit for voice agents list') + return json.loads(cached_list_str) + 
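+        # The list cache is invalidated on create/update/delete, so a miss
+        # here means the data changed or the 24h TTL expired.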
+ # Cache miss - fetch from DB + logger.info('Cache miss - fetching voice agents list from DB') + agents = await self.voice_agent_repository.find(is_deleted=False) + + # Convert to dicts and cache + agents_dicts = [agent.to_dict() for agent in agents] + self.cache_manager.add( + list_cache_key, json.dumps(agents_dicts), expiry=self.voice_agent_cache_time + ) + + return agents_dicts + + async def update_agent(self, agent_id: UUID, **update_data) -> Optional[dict]: + """ + Update a voice agent + + Args: + agent_id: UUID of the agent + **update_data: Fields to update + + Returns: + Updated agent as dict or None if not found + + Raises: + ValueError: If any foreign key validation fails + Exception: If TTS generation or upload fails + """ + logger.info(f'Updating voice agent: {agent_id}') + + existing_agent = await self.voice_agent_repository.find_one( + id=agent_id, is_deleted=False + ) + if not existing_agent: + return None + + # Check if welcome_message is being updated + welcome_message_changed = False + if ( + 'welcome_message' in update_data + and update_data['welcome_message'] != existing_agent.welcome_message + ): + welcome_message_changed = True + new_welcome_message = update_data['welcome_message'] + + # If any FK fields are being updated, validate them + if any( + key in update_data + for key in [ + 'llm_config_id', + 'tts_config_id', + 'stt_config_id', + 'telephony_config_id', + ] + ): + # Build the full set of FK IDs (use existing if not being updated) + llm_config_id = update_data.get( + 'llm_config_id', existing_agent.llm_config_id + ) + tts_config_id = update_data.get( + 'tts_config_id', existing_agent.tts_config_id + ) + stt_config_id = update_data.get( + 'stt_config_id', existing_agent.stt_config_id + ) + telephony_config_id = update_data.get( + 'telephony_config_id', existing_agent.telephony_config_id + ) + + is_valid, error_message = await self._validate_foreign_keys( + llm_config_id, tts_config_id, stt_config_id, telephony_config_id + ) + if not is_valid: + logger.error(f'FK validation failed: {error_message}') + raise ValueError(error_message) + + # If welcome message changed, regenerate audio + if welcome_message_changed: + logger.info('Welcome message changed, regenerating audio') + try: + # Use updated tts_config_id if provided, otherwise use existing + tts_config_id = update_data.get( + 'tts_config_id', existing_agent.tts_config_id + ) + await self._generate_and_upload_welcome_audio( + new_welcome_message, tts_config_id, agent_id + ) + except Exception as e: + logger.error(f'Failed to regenerate welcome audio: {str(e)}') + raise e + + updated_agent = await self.voice_agent_repository.find_one_and_update( + {'id': agent_id}, refresh=True, **update_data + ) + + # Invalidate caches + cache_key = get_voice_agent_cache_key(agent_id) + self.cache_manager.remove(cache_key) + + list_cache_key = get_voice_agents_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache('voice_agent', agent_id, 'update') + + logger.info(f'Successfully updated voice agent: {agent_id}') + return updated_agent.to_dict() + + async def get_welcome_message_audio_url(self, agent_id: UUID) -> str: + """ + Generate presigned URL for agent's welcome message audio (with caching) + + Args: + agent_id: UUID of the voice agent + + Returns: + str: Presigned HTTPS URL (2-hour expiration) or empty string if no welcome message + + Raises: + Exception: If presigned URL generation fails + """ + url_cache_key = 
get_welcome_message_url_cache_key(agent_id) + + # Try cache first + cached_url = self.cache_manager.get_str(url_cache_key) + if cached_url: + logger.info(f'Cache hit for welcome message URL for agent {agent_id}') + return cached_url + + try: + # Generate storage key from agent ID + storage_key = generate_welcome_message_key(agent_id) + + # Generate presigned URL with 2-hour expiration + presigned_url = self.cloud_storage_manager.generate_presigned_url( + bucket_name=self.voice_agent_bucket, + key=storage_key, + type='get', + expiresIn=7200, # 2 hours in seconds + ) + + # Cache the URL with expiry just under 2 hours (100 second buffer) + self.cache_manager.add(url_cache_key, presigned_url, expiry=7100) + + logger.info(f'Generated and cached presigned URL for agent {agent_id}') + return presigned_url + + except Exception as e: + logger.error( + f'Failed to generate presigned URL for agent {agent_id}: {str(e)}' + ) + raise Exception(f'Failed to generate welcome message audio URL: {str(e)}') + + async def delete_agent(self, agent_id: UUID) -> bool: + """ + Delete a voice agent (soft delete) + + Args: + agent_id: UUID of the agent + + Returns: + True if deleted, False if not found + """ + logger.info(f'Deleting voice agent: {agent_id}') + + existing_agent = await self.voice_agent_repository.find_one( + id=agent_id, is_deleted=False + ) + if not existing_agent: + return False + + await self.voice_agent_repository.find_one_and_update( + {'id': agent_id}, is_deleted=True + ) + + # Invalidate caches + cache_key = get_voice_agent_cache_key(agent_id) + self.cache_manager.remove(cache_key) + + list_cache_key = get_voice_agents_list_cache_key() + self.cache_manager.remove(list_cache_key) + + # Invalidate cache in call_processing + await invalidate_call_processing_cache('voice_agent', agent_id, 'delete') + + logger.info(f'Successfully deleted voice agent: {agent_id}') + return True diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/utils/cache_invalidation.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/utils/cache_invalidation.py new file mode 100644 index 00000000..099a91c4 --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/utils/cache_invalidation.py @@ -0,0 +1,78 @@ +"""Utility for invalidating cache in call_processing app""" + +import os +import httpx +from uuid import UUID +from common_module.log.logger import logger + + +async def invalidate_call_processing_cache( + config_type: str, + config_id: UUID, + operation: str = 'update', +) -> bool: + """ + Invalidate cache in call_processing app + + Args: + config_type: Type of config (voice_agent, tts_config, stt_config, telephony_config) + config_id: UUID of the config + operation: Operation type (create, update, or delete) + + Returns: + True if successful, False otherwise (never raises exceptions) + Logs warnings on failures but doesn't break the main operation + """ + call_processing_base_url = os.getenv('CALL_PROCESSING_BASE_URL') + passthrough_secret = os.getenv('PASSTHROUGH_SECRET') + + if not call_processing_base_url or not passthrough_secret: + logger.warning( + f'Cache invalidation skipped for {config_type} {config_id}: ' + f'CALL_PROCESSING_BASE_URL or PASSTHROUGH_SECRET not configured' + ) + return False + + url = f'{call_processing_base_url.rstrip("/")}/api/cache/invalidate' + headers = { + 'Content-Type': 'application/json', + 'X-Passthrough': passthrough_secret, + } + payload = {'config_type': config_type, 'config_id': str(config_id)} + + try: + async 
with httpx.AsyncClient(timeout=10.0) as client: + response = await client.post(url, json=payload, headers=headers) + + if response.status_code in [200, 201]: + logger.info( + f'Successfully invalidated cache for {config_type} {config_id} ' + f'(operation: {operation})' + ) + return True + else: + logger.warning( + f'Cache invalidation failed for {config_type} {config_id}: ' + f'HTTP {response.status_code} - {response.text}' + ) + return False + + except httpx.TimeoutException as e: + logger.warning( + f'Cache invalidation timeout for {config_type} {config_id}: {e}. ' + f'Continuing with main operation.' + ) + return False + except httpx.RequestError as e: + logger.warning( + f'Cache invalidation request error for {config_type} {config_id}: {e}. ' + f'Continuing with main operation.' + ) + return False + except Exception as e: + logger.warning( + f'Unexpected error during cache invalidation for {config_type} {config_id}: {e}. ' + f'Continuing with main operation.', + exc_info=True, + ) + return False diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/utils/cache_utils.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/utils/cache_utils.py new file mode 100644 index 00000000..688f23a6 --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/utils/cache_utils.py @@ -0,0 +1,46 @@ +from uuid import UUID + + +def get_telephony_config_cache_key(config_id: UUID) -> str: + """Generate cache key for a telephony config""" + return f'telephony_config:{config_id}' + + +def get_telephony_configs_list_cache_key() -> str: + """Generate cache key for telephony configs list""" + return 'telephony_configs:list' + + +def get_tts_config_cache_key(config_id: UUID) -> str: + """Generate cache key for a TTS config""" + return f'tts_config:{config_id}' + + +def get_tts_configs_list_cache_key() -> str: + """Generate cache key for TTS configs list""" + return 'tts_configs:list' + + +def get_stt_config_cache_key(config_id: UUID) -> str: + """Generate cache key for an STT config""" + return f'stt_config:{config_id}' + + +def get_stt_configs_list_cache_key() -> str: + """Generate cache key for STT configs list""" + return 'stt_configs:list' + + +def get_voice_agent_cache_key(agent_id: UUID) -> str: + """Generate cache key for a voice agent""" + return f'voice_agent:{agent_id}' + + +def get_voice_agents_list_cache_key() -> str: + """Generate cache key for voice agents list""" + return 'voice_agents:list' + + +def get_welcome_message_url_cache_key(agent_id: UUID) -> str: + """Generate cache key for a voice agent's welcome message presigned URL""" + return f'voice_agent_welcome_url:{agent_id}' diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/utils/storage_utils.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/utils/storage_utils.py new file mode 100644 index 00000000..b1b70714 --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/utils/storage_utils.py @@ -0,0 +1,18 @@ +""" +Storage utility functions for voice agents module. +""" + +import uuid + + +def generate_welcome_message_key(voice_agent_id: uuid.UUID) -> str: + """ + Generate cloud storage key for voice agent welcome message audio. 
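+
+    The key is deterministic per agent, so regenerating the welcome message
+    overwrites the previous audio object in place.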
+ + Args: + voice_agent_id: UUID of the voice agent + + Returns: + str: Cloud storage key in format /voice_agents/{voice_agent_id}.mp3 + """ + return f'voice_agents/{voice_agent_id}.mp3' diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/utils/telephony_utils.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/utils/telephony_utils.py new file mode 100644 index 00000000..6d22e4f6 --- /dev/null +++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/utils/telephony_utils.py @@ -0,0 +1,86 @@ +""" +Telephony utility functions for URL generation and SIP URI construction. +""" + + +def get_sip_uri(sip_config: dict, phone_number: str) -> str: + """ + Construct SIP URI from SIP configuration and phone number. + + Args: + sip_config: SIP configuration dict with sip_domain, optional port and transport + phone_number: Phone number to call (e.g., '+1234567890') + + Returns: + SIP URI string (e.g., 'sip:+1234567890@pstn.twilio.com') + + Examples: + - Basic: 'sip:+1234567890@example.sip.daily.co' + - With port: 'sip:+1234567890@sip.twilio.com:5061' + - With transport: 'sip:+1234567890@sip.twilio.com;transport=tls' + """ + domain = sip_config['sip_domain'] + + # Add optional port if specified + if 'port' in sip_config and sip_config['port']: + domain = f"{domain}:{sip_config['port']}" + + uri = f'sip:{phone_number}@{domain}' + + # Add transport parameter if specified + if 'transport' in sip_config and sip_config['transport']: + uri += f";transport={sip_config['transport']}" + + return uri + + +def get_websocket_url(call_id: str, base_url: str) -> str: + """ + Auto-generate WebSocket URL for media streaming. + + This URL is used by Twilio to stream real-time audio to our call processing app. + + Args: + call_id: Unique identifier for the call + base_url: Base URL of the call processing app (from CALL_PROCESSING_PUBLIC_URL env var) + + Returns: + WebSocket URL string + + Example: + 'wss://call-processing.example.com/webhooks/twilio/media/abc-123' + """ + return f'{base_url}/webhooks/twilio/media/{call_id}' + + +def validate_phone_number(phone_number: str) -> bool: + """ + Validate phone number format (E.164 format recommended). + + Args: + phone_number: Phone number string + + Returns: + True if valid format, False otherwise + + Note: + This is a basic validation. 
E.164 format: +[country code][number]
+        Example: +14155552671
+    """
+    if not phone_number:
+        return False
+
+    # Basic E.164 validation: starts with '+', contains only digits after it
+    if not phone_number.startswith('+'):
+        return False
+
+    # Check remaining characters are digits
+    phone_digits = phone_number[1:]
+    if not phone_digits.isdigit():
+        return False
+
+    # E.164 allows at most 15 digits in total, country code included
+    if len(phone_digits) < 1 or len(phone_digits) > 15:
+        return False
+
+    return True
diff --git a/wavefront/server/modules/voice_agents_module/voice_agents_module/voice_agents_container.py b/wavefront/server/modules/voice_agents_module/voice_agents_module/voice_agents_container.py
new file mode 100644
index 00000000..f799c4f8
--- /dev/null
+++ b/wavefront/server/modules/voice_agents_module/voice_agents_module/voice_agents_container.py
@@ -0,0 +1,95 @@
+from dependency_injector import containers, providers
+
+from db_repo_module.models.telephony_config import TelephonyConfig
+from db_repo_module.models.tts_config import TtsConfig
+from db_repo_module.models.stt_config import SttConfig
+from db_repo_module.models.voice_agent import VoiceAgent
+from db_repo_module.models.llm_inference_config import LlmInferenceConfig
+from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository
+from voice_agents_module.services.telephony_config_service import TelephonyConfigService
+from voice_agents_module.services.tts_config_service import TtsConfigService
+from voice_agents_module.services.stt_config_service import SttConfigService
+from voice_agents_module.services.voice_agent_service import VoiceAgentService
+from voice_agents_module.services.twilio_service import TwilioService
+from voice_agents_module.services.tts_generator_service import TTSGeneratorService
+
+
+class VoiceAgentsContainer(containers.DeclarativeContainer):
+    config = providers.Configuration(ini_files=['config.ini'])
+
+    # External dependencies
+    db_client = providers.Dependency()
+    cache_manager = providers.Dependency()
+    cloud_storage_manager = providers.Dependency()
+
+    # Repositories
+    telephony_config_repository = providers.Singleton(
+        SQLAlchemyRepository[TelephonyConfig],
+        model=TelephonyConfig,
+        db_client=db_client,
+    )
+
+    tts_config_repository = providers.Singleton(
+        SQLAlchemyRepository[TtsConfig],
+        model=TtsConfig,
+        db_client=db_client,
+    )
+
+    stt_config_repository = providers.Singleton(
+        SQLAlchemyRepository[SttConfig],
+        model=SttConfig,
+        db_client=db_client,
+    )
+
+    llm_config_repository = providers.Singleton(
+        SQLAlchemyRepository[LlmInferenceConfig],
+        model=LlmInferenceConfig,
+        db_client=db_client,
+    )
+
+    voice_agent_repository = providers.Singleton(
+        SQLAlchemyRepository[VoiceAgent],
+        model=VoiceAgent,
+        db_client=db_client,
+    )
+
+    # Services
+    telephony_config_service = providers.Singleton(
+        TelephonyConfigService,
+        telephony_config_repository=telephony_config_repository,
+        cache_manager=cache_manager,
+    )
+
+    tts_config_service = providers.Singleton(
+        TtsConfigService,
+        tts_config_repository=tts_config_repository,
+        cache_manager=cache_manager,
+    )
+
+    stt_config_service = providers.Singleton(
+        SttConfigService,
+        stt_config_repository=stt_config_repository,
+        cache_manager=cache_manager,
+    )
+
+    tts_generator_service = providers.Singleton(
+        TTSGeneratorService,
+    )
+
+    voice_agent_service = providers.Singleton(
+        VoiceAgentService,
+        voice_agent_repository=voice_agent_repository,
+        telephony_config_service=telephony_config_service,
+
tts_config_service=tts_config_service, + stt_config_service=stt_config_service, + llm_config_repository=llm_config_repository, + cache_manager=cache_manager, + tts_generator_service=tts_generator_service, + cloud_storage_manager=cloud_storage_manager, + voice_agent_bucket=config.voice_agents.voice_agent_bucket, + ) + + twilio_service = providers.Singleton( + TwilioService, + call_processing_base_url=config.voice_agents.call_processing_base_url, + ) diff --git a/wavefront/server/packages/flo_cloud/README.md b/wavefront/server/packages/flo_cloud/README.md new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/_types/__init__.py b/wavefront/server/packages/flo_cloud/flo_cloud/_types/__init__.py new file mode 100644 index 00000000..57b3ac7e --- /dev/null +++ b/wavefront/server/packages/flo_cloud/flo_cloud/_types/__init__.py @@ -0,0 +1,19 @@ +from enum import Enum +from .kms import FloKMS +from .cloud_storage import CloudStorageHandler +from .message_queue import MessageQueue, MessageQueueDict + + +class CloudProvider(str, Enum): + AWS = 'aws' + GCP = 'gcp' + AZURE = 'azure' + + +__all__ = [ + 'CloudProvider', + 'FloKMS', + 'CloudStorageHandler', + 'MessageQueue', + 'MessageQueueDict', +] diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/_types/cloud_storage.py b/wavefront/server/packages/flo_cloud/flo_cloud/_types/cloud_storage.py new file mode 100644 index 00000000..175d5914 --- /dev/null +++ b/wavefront/server/packages/flo_cloud/flo_cloud/_types/cloud_storage.py @@ -0,0 +1,105 @@ +from abc import ABC, abstractmethod +from typing import List, Tuple, Optional + + +class CloudStorageHandler(ABC): + """Abstract base class for cloud storage operations""" + + @abstractmethod + def get_file(self, bucket_name: str, file_path: str) -> bytes: + """ + Abstract method to get file from bucket and return as buffer + + Args: + bucket_name (str): Name of the bucket + file_path (str): Path to the file in bucket + + Returns: + File content as bytes + """ + pass + + @abstractmethod + def save_large_file( + self, + data: bytes, + bucket_name: str, + key: str, + content_type: Optional[str] = None, + ) -> None: + """ + Save large file to cloud storage using streaming/multipart upload. + + Use this method for large files that benefit from streaming uploads + and automatic multipart handling to optimize memory usage. + + Args: + data: File data in bytes + bucket_name: Name of the storage bucket + key: Object key/path for the file in the bucket + content_type: MIME type of the file (e.g., 'image/jpeg', 'application/pdf'). + If None, the cloud provider will use its default. + + Raises: + Exception: If upload fails + """ + pass + + @abstractmethod + def save_small_file( + self, + file_content: bytes, + bucket_name: str, + key: str, + content_type: Optional[str] = None, + ) -> None: + """ + Save small file to cloud storage using direct upload. + + Use this method for small files that can be uploaded efficiently + in a single operation without streaming. + + Args: + file_content: File content in bytes + bucket_name: Name of the storage bucket + key: Object key/path for the file in the bucket + content_type: MIME type of the file (e.g., 'image/jpeg', 'application/pdf'). + If None, the cloud provider will use its default. 
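+
+        Note:
+            As a rough rule of thumb, use this for objects up to a few
+            megabytes; use save_large_file for anything that benefits from a
+            multipart upload.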
+ + Raises: + Exception: If upload fails + """ + pass + + @abstractmethod + def get_bucket_key(self, value: str): + """ """ + pass + + @abstractmethod + def generate_presigned_url( + self, bucket_name: str, key: str, type: str, expiresIn: int = 300 + ) -> str: + """ """ + pass + + @abstractmethod + def list_files( + self, bucket_name: str, prefix: str, page_size: int = 50, page_number: int = 1 + ) -> Tuple[List[str], bool]: + """ + List files in a bucket with prefix filtering and pagination. + + Args: + bucket_name (str): Name of the bucket + prefix (str): Prefix to filter files + page_size (int): Number of files per page (default: 50) + page_number (int): Which page to retrieve, 1-based (default: 1) + + Returns: + Tuple[List[str], bool]: (list of file keys/paths, has_next_page) + + Raises: + Exception: If listing fails + """ + pass diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/_types/kms.py b/wavefront/server/packages/flo_cloud/flo_cloud/_types/kms.py new file mode 100644 index 00000000..9be93e7a --- /dev/null +++ b/wavefront/server/packages/flo_cloud/flo_cloud/_types/kms.py @@ -0,0 +1,23 @@ +from abc import ABC, abstractmethod + + +class FloKMS(ABC): + @abstractmethod + def encrypt(self, plaintext: str) -> bytes: + pass + + @abstractmethod + def decrypt(self, ciphertext: str) -> bytes: + pass + + @abstractmethod + def sign(self, message: bytes, **kwargs) -> bytes: + pass + + @abstractmethod + def verify(self, message: bytes, signature: bytes, **kwargs) -> bool: + pass + + @abstractmethod + def get_public_key_pem(self, **kwargs) -> bytes | str: + pass diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/_types/message_queue.py b/wavefront/server/packages/flo_cloud/flo_cloud/_types/message_queue.py new file mode 100644 index 00000000..a3a9f705 --- /dev/null +++ b/wavefront/server/packages/flo_cloud/flo_cloud/_types/message_queue.py @@ -0,0 +1,37 @@ +from abc import ABC, abstractmethod +from dataclasses import dataclass +from typing import Any, List + + +@dataclass +class MessageQueueDict: + body: Any + ack_id: str + id: str + + +class MessageQueue(ABC): + @abstractmethod + def receive_messages( + self, max_messages=10, wait_time_sec=20 + ) -> List[MessageQueueDict] | None: + """ + Receive messages from the event queue. 
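+
+        Implementations are expected to long-poll for up to wait_time_sec
+        seconds and return at most max_messages messages per call.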
+
+        Returns:
+            List of dicts, each with keys:
+                - 'body': message content (any type)
+                - 'ack_id': acknowledgement ID (str)
+                - 'id': message ID (str)
+        """
+        pass
+
+    @abstractmethod
+    def delete_message(self, ack_id: str):
+        pass
+
+    @abstractmethod
+    def add_message(
+        self, message_body: dict, topic_name_or_queue_url: str | None = None
+    ) -> str:
+        pass
diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/aws/kms.py b/wavefront/server/packages/flo_cloud/flo_cloud/aws/kms.py
new file mode 100644
index 00000000..0c2a149c
--- /dev/null
+++ b/wavefront/server/packages/flo_cloud/flo_cloud/aws/kms.py
@@ -0,0 +1,67 @@
+import os
+import boto3
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.backends import default_backend
+from .._types import FloKMS
+
+aws_kms_arn = os.getenv('AWS_KMS_ARN')
+aws_region = os.getenv('AWS_REGION')
+
+
+class AwsKMS(FloKMS):
+    def __init__(self):
+        if not all([aws_region, aws_kms_arn]):
+            raise ValueError('Region and KMS ARN must be set')
+
+        self.aws_kms_arn = aws_kms_arn
+        self.aws_region = aws_region
+        self.kms_client = boto3.client('kms', region_name=aws_region)
+
+    def encrypt(self, plaintext: str) -> bytes:
+        # Return only the ciphertext blob, not the full KMS response dict
+        response = self.kms_client.encrypt(
+            KeyId=self.aws_kms_arn, Plaintext=plaintext
+        )
+        return response['CiphertextBlob']
+
+    def decrypt(self, ciphertext: str) -> bytes:
+        # Return only the decrypted plaintext, not the full KMS response dict
+        response = self.kms_client.decrypt(
+            KeyId=self.aws_kms_arn, CiphertextBlob=ciphertext
+        )
+        return response['Plaintext']
+
+    def sign(self, message: bytes, **kwargs) -> bytes:
+        signing_algorithm = kwargs.get('signing_algorithm', 'RSASSA_PSS_SHA_256')
+        message_type = kwargs.get('message_type', 'DIGEST')
+
+        response = self.kms_client.sign(
+            KeyId=self.aws_kms_arn,
+            Message=message,
+            MessageType=message_type,
+            SigningAlgorithm=signing_algorithm,
+        )
+        return response['Signature']
+
+    def verify(self, message: bytes, signature: bytes, **kwargs) -> bool:
+        signing_algorithm = kwargs.get('signing_algorithm', 'RSASSA_PSS_SHA_256')
+        message_type = kwargs.get('message_type', 'DIGEST')
+
+        response = self.kms_client.verify(
+            KeyId=self.aws_kms_arn,
+            Message=message,
+            MessageType=message_type,
+            Signature=signature,
+            SigningAlgorithm=signing_algorithm,
+        )
+        return response['SignatureValid']
+
+    def get_public_key_pem(self, **kwargs) -> str | bytes:
+        response = self.kms_client.get_public_key(
+            KeyId=self.aws_kms_arn,
+        )
+        public_key_der = response['PublicKey']
+        public_key = serialization.load_der_public_key(
+            public_key_der, default_backend()
+        )
+
+        pem_bytes = public_key.public_bytes(
+            encoding=serialization.Encoding.PEM,
+            format=serialization.PublicFormat.SubjectPublicKeyInfo,
+        )
+
+        return pem_bytes.decode('utf-8')
diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/aws/redshift.py b/wavefront/server/packages/flo_cloud/flo_cloud/aws/redshift.py
new file mode 100644
index 00000000..24c44ab3
--- /dev/null
+++ b/wavefront/server/packages/flo_cloud/flo_cloud/aws/redshift.py
@@ -0,0 +1,652 @@
+import os
+import logging
+from typing import List, Dict, Any, Optional, Tuple
+from contextlib import contextmanager
+import redshift_connector
+from redshift_connector import Connection, Error as RedshiftError
+
+logger = logging.getLogger(__name__)
+
+
+class RedshiftClient:
+    """
+    Comprehensive Redshift client using redshift-connector library.
+    Provides all essential database operations for Amazon Redshift.
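+
+    Connections are opened per operation via context managers and closed
+    automatically; no connection pooling is performed.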
+ """ + + def __init__( + self, + host: Optional[str] = None, + port: int = 5439, + database: Optional[str] = None, + user: Optional[str] = None, + password: Optional[str] = None, + cluster_identifier: Optional[str] = None, + iam_profile: Optional[str] = None, + region: Optional[str] = None, + ssl: bool = True, + tcp_keepalive: bool = True, + timeout: int = 60, + **kwargs, + ): + """ + Initialize Redshift client with connection parameters. + + Args: + host: Redshift cluster endpoint + port: Redshift port (default: 5439) + database: Database name + user: Username for authentication + password: Password for authentication + cluster_identifier: Redshift cluster identifier for IAM auth + iam_profile: IAM profile name for IAM authentication + region: AWS region + ssl: Enable SSL connection (default: True) + timeout: Connection timeout in seconds + **kwargs: Additional connection parameters + """ + self.host = host or os.getenv('REDSHIFT_HOST') + self.port = port + self.database = database or os.getenv('REDSHIFT_DATABASE') + self.user = user or os.getenv('REDSHIFT_USER') + self.password = password or os.getenv('REDSHIFT_PASSWORD') + self.cluster_identifier = cluster_identifier or os.getenv( + 'REDSHIFT_CLUSTER_IDENTIFIER' + ) + self.iam_profile = iam_profile or os.getenv('REDSHIFT_IAM_PROFILE') + self.region = region or os.getenv('AWS_REGION') + self.ssl = ssl + self.timeout = timeout + self.connection_params = kwargs + self.tcp_keepalive = tcp_keepalive + + if not self.host: + raise ValueError( + 'Redshift host must be provided via parameter or REDSHIFT_HOST environment variable' + ) + + if not self.database: + raise ValueError( + 'Database name must be provided via parameter or REDSHIFT_DATABASE environment variable' + ) + + def _get_connection_params(self) -> Dict[str, Any]: + """Get connection parameters for redshift-connector.""" + params = { + 'host': self.host, + 'port': self.port, + 'database': self.database, + 'ssl': self.ssl, + 'timeout': self.timeout, + 'tcp_keepalive': self.tcp_keepalive, + **self.connection_params, + } + + # Use IAM authentication if cluster_identifier is provided + if self.cluster_identifier: + params.update( + { + 'cluster_identifier': self.cluster_identifier, + 'region': self.region, + 'iam_profile': self.iam_profile, + } + ) + else: + # Use username/password authentication + if not self.user or not self.password: + raise ValueError( + 'Username and password must be provided for non-IAM authentication' + ) + params.update({'user': self.user, 'password': self.password}) + + return params + + @contextmanager + def get_connection(self): + """Context manager for database connections.""" + connection = None + try: + redshift_connector.paramstyle = 'named' + connection = redshift_connector.connect(**self._get_connection_params()) + yield connection + except RedshiftError as e: + logger.error(f'Redshift connection error: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error connecting to Redshift: {e}') + raise + finally: + if connection: + connection.close() + + @contextmanager + def get_cursor(self, connection: Optional[Connection] = None): + """Context manager for database cursors.""" + if connection: + cursor = connection.cursor() + try: + yield cursor + finally: + cursor.close() + else: + with self.get_connection() as conn: + cursor = conn.cursor() + try: + yield cursor + finally: + cursor.close() + + def execute_query( + self, query: str, params: Optional[Dict[str, Any]] = None + ) -> List[Tuple]: + """ + Execute a SELECT query and return 
results.
+
+        Args:
+            query: SQL query to execute
+            params: Query parameters (optional)
+
+        Returns:
+            List of tuples containing query results
+        """
+        with self.get_cursor() as cursor:
+            try:
+                cursor.execute(query, params)
+                return cursor.fetchall()
+            except RedshiftError as e:
+                logger.error(f'Query execution error: {e}')
+                raise
+
+    def execute_query_to_dict(
+        self, query: str, params: Optional[Dict[str, Any]] = None
+    ) -> List[Dict[str, Any]]:
+        """
+        Execute a SELECT query and return results as dictionaries.
+
+        Args:
+            query: SQL query to execute
+            params: Query parameters (optional)
+
+        Returns:
+            List of dictionaries containing query results, one per row,
+            keyed by column name
+        """
+        with self.get_cursor() as cursor:
+            try:
+                cursor.execute(query, params)
+                columns = [desc[0] for desc in cursor.description]
+                return [dict(zip(columns, row)) for row in cursor.fetchall()]
+            except RedshiftError as e:
+                logger.error(f'Query execution error: {e}')
+                raise
+
+    def execute_command(
+        self, command: str, params: Optional[Dict[str, Any]] = None
+    ) -> int:
+        """
+        Execute a non-SELECT command (INSERT, UPDATE, DELETE, DDL).
+
+        Args:
+            command: SQL command to execute
+            params: Command parameters (optional)
+
+        Returns:
+            Number of affected rows
+        """
+        with self.get_cursor() as cursor:
+            try:
+                cursor.execute(command, params)
+                return cursor.rowcount
+            except RedshiftError as e:
+                logger.error(f'Command execution error: {e}')
+                raise
+
+    def execute_many(self, command: str, params_list: List[Dict[str, Any]]) -> int:
+        """
+        Execute a command with multiple parameter sets (batch operations).
+
+        Args:
+            command: SQL command to execute
+            params_list: List of parameter dicts, one per execution
+
+        Returns:
+            Number of affected rows
+        """
+        with self.get_cursor() as cursor:
+            try:
+                cursor.executemany(command, params_list)
+                return cursor.rowcount
+            except RedshiftError as e:
+                logger.error(f'Batch execution error: {e}')
+                raise
+
+    def execute_transaction(
+        self, commands: List[Tuple[str, Optional[Dict[str, Any]]]]
+    ) -> bool:
+        """
+        Execute multiple commands in a transaction.
+
+        Args:
+            commands: List of (command, params) tuples
+
+        Returns:
+            True if the transaction succeeded; raises after rollback on failure
+        """
+        with self.get_connection() as connection:
+            cursor = connection.cursor()
+            try:
+                for command, params in commands:
+                    cursor.execute(command, params)
+                connection.commit()
+                return True
+            except RedshiftError as e:
+                connection.rollback()
+                logger.error(f'Transaction error: {e}')
+                raise
+            finally:
+                cursor.close()
+
+    def copy_from_s3(
+        self,
+        table_name: str,
+        s3_path: str,
+        aws_access_key_id: Optional[str] = None,
+        aws_secret_access_key: Optional[str] = None,
+        aws_session_token: Optional[str] = None,
+        region: Optional[str] = None,
+        delimiter: str = ',',
+        format_type: str = 'CSV',
+        header: bool = True,
+        compression: Optional[str] = None,
+        **kwargs,
+    ) -> int:
+        """
+        Copy data from S3 to Redshift table.
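+
+        Falls back to the cluster's default IAM role when no explicit access
+        keys are supplied.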
+
+        Args:
+            table_name: Target table name
+            s3_path: S3 path (s3://bucket/key)
+            aws_access_key_id: AWS access key
+            aws_secret_access_key: AWS secret key
+            aws_session_token: AWS session token
+            region: AWS region of the source bucket
+            delimiter: Field delimiter
+            format_type: File format (CSV, JSON, etc.)
+            header: Whether the file has a header row to skip
+            compression: Compression type (GZIP, BZIP2, etc.)
+            **kwargs: Additional COPY command options
+
+        Returns:
+            Number of rows copied
+        """
+        # Build COPY command
+        copy_command = f"COPY {table_name} FROM '{s3_path}'"
+
+        # Add credentials (the session token must live inside the quoted string)
+        if aws_access_key_id and aws_secret_access_key:
+            credentials = (
+                f'aws_access_key_id={aws_access_key_id};'
+                f'aws_secret_access_key={aws_secret_access_key}'
+            )
+            if aws_session_token:
+                credentials += f';token={aws_session_token}'
+            copy_command += f" CREDENTIALS '{credentials}'"
+        else:
+            copy_command += ' IAM_ROLE default'
+
+        if region:
+            copy_command += f" REGION '{region}'"
+
+        # Add format options
+        copy_command += f' FORMAT AS {format_type}'
+        if delimiter != ',':
+            copy_command += f" DELIMITER '{delimiter}'"
+        if header:
+            # COPY skips the header row via IGNOREHEADER
+            copy_command += ' IGNOREHEADER 1'
+        if compression:
+            # Compression is a bare keyword in COPY (e.g. GZIP, BZIP2)
+            copy_command += f' {compression.upper()}'
+
+        # Add additional options
+        for key, value in kwargs.items():
+            copy_command += f' {key.upper()} {value}'
+
+        return self.execute_command(copy_command)
+
+    def copy_to_s3(
+        self,
+        query: str,
+        s3_path: str,
+        aws_access_key_id: Optional[str] = None,
+        aws_secret_access_key: Optional[str] = None,
+        aws_session_token: Optional[str] = None,
+        delimiter: str = ',',
+        format_type: str = 'CSV',
+        header: bool = True,
+        compression: Optional[str] = None,
+        **kwargs,
+    ) -> int:
+        """
+        Copy query results to S3.
+
+        Args:
+            query: SQL query to execute (single quotes must be escaped by the caller)
+            s3_path: S3 path (s3://bucket/key)
+            aws_access_key_id: AWS access key
+            aws_secret_access_key: AWS secret key
+            aws_session_token: AWS session token
+            delimiter: Field delimiter
+            format_type: File format (CSV, JSON, etc.)
+            header: Whether to include header row
+            compression: Compression type (GZIP, BZIP2, etc.)
+            **kwargs: Additional UNLOAD command options
+
+        Returns:
+            Number of rows unloaded
+        """
+        # Build UNLOAD command
+        unload_command = f"UNLOAD ('{query}') TO '{s3_path}'"
+
+        # Add credentials (the session token must live inside the quoted string)
+        if aws_access_key_id and aws_secret_access_key:
+            credentials = (
+                f'aws_access_key_id={aws_access_key_id};'
+                f'aws_secret_access_key={aws_secret_access_key}'
+            )
+            if aws_session_token:
+                credentials += f';token={aws_session_token}'
+            unload_command += f" CREDENTIALS '{credentials}'"
+        else:
+            unload_command += ' IAM_ROLE default'
+
+        # Add format options
+        unload_command += f' FORMAT AS {format_type}'
+        if delimiter != ',':
+            unload_command += f" DELIMITER '{delimiter}'"
+        if header:
+            unload_command += ' HEADER'
+        if compression:
+            # Compression is a bare keyword in UNLOAD (e.g. GZIP, BZIP2)
+            unload_command += f' {compression.upper()}'
+
+        # Add additional options
+        for key, value in kwargs.items():
+            unload_command += f' {key.upper()} {value}'
+
+        return self.execute_command(unload_command)
+
+    def get_table_info(self, table_name: str) -> Dict[str, Any]:
+        """
+        Get detailed information about a table.
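+
+        Combines column metadata from information_schema.columns with column
+        statistics from pg_stats.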
+
+        Args:
+            table_name: Name of the table
+
+        Returns:
+            Dictionary containing table information
+        """
+        query = """
+        SELECT
+            c.column_name,
+            c.data_type,
+            c.character_maximum_length,
+            c.numeric_precision,
+            c.numeric_scale,
+            c.is_nullable,
+            c.column_default,
+            c.ordinal_position
+        FROM information_schema.columns c
+        WHERE c.table_name = :table_name
+        ORDER BY c.ordinal_position
+        """
+
+        # Raw queries go through execute_raw_query_to_dict; execute_query_to_dict
+        # builds its own SQL and cannot accept a prepared statement.
+        columns = self.execute_raw_query_to_dict(query, {'table_name': table_name})
+
+        # Get table statistics
+        stats_query = """
+        SELECT
+            schemaname,
+            tablename,
+            attname,
+            n_distinct,
+            most_common_vals,
+            most_common_freqs
+        FROM pg_stats
+        WHERE tablename = :tablename
+        """
+
+        stats = self.execute_raw_query_to_dict(stats_query, {'tablename': table_name})
+
+        return {'table_name': table_name, 'columns': columns, 'statistics': stats}
+
+    def list_tables(self, schema: str = 'public') -> List[str]:
+        """
+        List all tables in a schema.
+
+        Args:
+            schema: Schema name (default: 'public')
+
+        Returns:
+            List of table names
+        """
+        query = """
+        SELECT table_name
+        FROM information_schema.tables
+        WHERE table_schema = :table_schema AND table_type = 'BASE TABLE'
+        ORDER BY table_name
+        """
+
+        results = self.execute_query(query, {'table_schema': schema})
+        return [row[0] for row in results]
+
+    def get_table_size(self, table_name: str, schema: str = 'public') -> Dict[str, Any]:
+        """
+        Get table size information.
+
+        Args:
+            table_name: Name of the table
+            schema: Schema name (default: 'public')
+
+        Returns:
+            Dictionary containing size information
+        """
+        query = """
+        SELECT
+            schemaname,
+            tablename,
+            attname,
+            n_distinct,
+            most_common_vals,
+            most_common_freqs
+        FROM pg_stats
+        WHERE tablename = :tablename AND schemaname = :schemaname
+        """
+
+        results = self.execute_raw_query_to_dict(
+            query, {'tablename': table_name, 'schemaname': schema}
+        )
+
+        # Get size in bytes
+        size_query = """
+        SELECT
+            pg_size_pretty(pg_total_relation_size(:tablename)) as size_pretty,
+            pg_total_relation_size(:tablename) as size_bytes
+        """
+
+        size_results = self.execute_raw_query_to_dict(
+            size_query, {'tablename': f'{schema}.{table_name}'}
+        )
+
+        return {
+            'table_name': table_name,
+            'schema': schema,
+            'statistics': results,
+            'size': size_results[0] if size_results else {},
+        }
+
+    def analyze_table(self, table_name: str, schema: str = 'public') -> bool:
+        """
+        Run ANALYZE on a table to update statistics.
+
+        Args:
+            table_name: Name of the table
+            schema: Schema name (default: 'public')
+
+        Returns:
+            True if successful
+        """
+        command = f'ANALYZE {schema}.{table_name}'
+        try:
+            self.execute_command(command)
+            return True
+        except RedshiftError as e:
+            logger.error(f'ANALYZE failed for {schema}.{table_name}: {e}')
+            return False
+
+    def vacuum_table(
+        self, table_name: str, schema: str = 'public', full: bool = False
+    ) -> bool:
+        """
+        Run VACUUM on a table to reclaim storage and sort rows.
+
+        Args:
+            table_name: Name of the table
+            schema: Schema name (default: 'public')
+            full: Whether to run FULL VACUUM
+
+        Returns:
+            True if successful
+        """
+        command = f"VACUUM {'FULL ' if full else ''}{schema}.{table_name}"
+        try:
+            self.execute_command(command)
+            return True
+        except RedshiftError as e:
+            logger.error(f'VACUUM failed for {schema}.{table_name}: {e}')
+            return False
+
+    def get_query_plan(self, query: str) -> List[Dict[str, Any]]:
+        """
+        Get the execution plan for a query.
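+
+        Example (illustrative; the query is a placeholder):
+
+            plan = client.get_query_plan('SELECT * FROM users WHERE id = 1')
+            for step in plan:
+                print(step['plan_line'])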
+
+        Args:
+            query: SQL query to analyze
+
+        Returns:
+            List of dictionaries containing execution plan details
+        """
+        explain_query = f'EXPLAIN {query}'
+        results = self.execute_query(explain_query)
+
+        # Parse the explain output
+        plan_lines = [row[0] for row in results]
+        return [{'plan_line': line} for line in plan_lines]
+
+    def cancel_query(self, query_id: str) -> bool:
+        """
+        Cancel a running query.
+
+        Args:
+            query_id: Query ID to cancel
+
+        Returns:
+            True if cancellation was successful
+        """
+        try:
+            self.execute_command(f'CANCEL {query_id}')
+            return True
+        except RedshiftError as e:
+            logger.error(f'Failed to cancel query {query_id}: {e}')
+            return False
+
+    def get_active_queries(self) -> List[Dict[str, Any]]:
+        """
+        Get list of currently active queries.
+
+        Returns:
+            List of dictionaries containing active query information
+        """
+        query = """
+        SELECT
+            pid,
+            usename,
+            query,
+            state,
+            starttime,
+            query_start,
+            state_change
+        FROM pg_stat_activity
+        WHERE state != 'idle' AND query NOT LIKE '%pg_stat_activity%'
+        ORDER BY starttime DESC
+        """
+
+        # Raw SQL, so use the raw-query helper rather than the SELECT builder
+        return self.execute_raw_query_to_dict(query)
+
+    def get_query_history(self, limit: int = 100) -> List[Dict[str, Any]]:
+        """
+        Get recent query history from STL_QUERY table.
+
+        Args:
+            limit: Maximum number of queries to return
+
+        Returns:
+            List of dictionaries containing query history
+        """
+        query = """
+        SELECT
+            query,
+            starttime,
+            endtime,
+            elapsed,
+            aborted,
+            userid,
+            pid
+        FROM stl_query
+        ORDER BY starttime DESC
+        LIMIT :limit
+        """
+
+        return self.execute_raw_query_to_dict(query, {'limit': limit})
+
+    def test_connection(self) -> bool:
+        """
+        Test the database connection.
+
+        Returns:
+            True if connection is successful
+        """
+        try:
+            result = self.execute_query('SELECT 1')
+            return len(result) > 0 and result[0][0] == 1
+        except Exception as e:
+            logger.error(f'Connection test failed: {e}')
+            return False
+
+    def get_cluster_info(self) -> Dict[str, Any]:
+        """
+        Get Redshift cluster information.
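+
+        Example (illustrative; assumes an initialized client):
+
+            info = client.get_cluster_info()
+            print(info['current_database'], info['node_count'])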
+ + Returns: + Dictionary containing cluster information + """ + queries = { + 'version': 'SELECT version()', + 'current_database': 'SELECT current_database()', + 'current_user': 'SELECT current_user', + 'current_schema': 'SELECT current_schema()', + 'session_id': 'SELECT session_id()', + 'node_count': 'SELECT COUNT(*) FROM stv_slices', + } + + info = {} + for key, query in queries.items(): + try: + result = self.execute_query(query) + info[key] = result[0][0] if result else None + except Exception as e: + logger.warning(f'Failed to get {key}: {e}') + info[key] = None + + return info diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/aws/s3.py b/wavefront/server/packages/flo_cloud/flo_cloud/aws/s3.py new file mode 100644 index 00000000..689745be --- /dev/null +++ b/wavefront/server/packages/flo_cloud/flo_cloud/aws/s3.py @@ -0,0 +1,203 @@ +from itertools import islice +import boto3 +import io +from typing import Optional, List, Tuple +from botocore.exceptions import ClientError +from .._types import CloudStorageHandler +from ..exceptions import CloudStorageFileNotFoundError +import re + + +class S3Storage(CloudStorageHandler): + """AWS S3 implementation""" + + def __init__( + self, + aws_access_key_id: Optional[str] = None, + aws_secret_access_key: Optional[str] = None, + region_name: Optional[str] = None, + ): + """ + Initialize AWS client + + Args: + aws_access_key_id: AWS access key ID (optional) + aws_secret_access_key: AWS secret access key (optional) + region_name: AWS region name (optional) + """ + self.s3_client = boto3.client('s3') + + def get_file(self, bucket_name: str, file_path: str) -> bytes: + """ + Get file from S3 bucket and return as buffer + + Args: + bucket_name (str): Name of the S3 bucket + file_path (str): Path to the file in bucket + + Returns: + File content as bytes + + Raises: + CloudStorageFileNotFoundError: If file not found + Exception: If other errors occur + """ + try: + s3_response = self.s3_client.get_object(Bucket=bucket_name, Key=file_path) + file_content = s3_response['Body'].read() + + return file_content + except ClientError as e: + # Check if the error is specifically "NoSuchKey" or "NoSuchBucket" + if e.response['Error']['Code'] in ['NoSuchKey', 'NoSuchBucket']: + raise CloudStorageFileNotFoundError(bucket_name, file_path) + else: + # Re-raise other ClientError exceptions + raise Exception(f'Error reading file from S3: {str(e)}') + except Exception as e: + raise Exception(f'Error reading file from S3: {str(e)}') + + def save_large_file( + self, + data: bytes, + bucket_name: str, + key: str, + content_type: Optional[str] = None, + ) -> None: + """S3 implementation of large file upload using upload_fileobj.""" + try: + fileobj = io.BytesIO(data) + extra_args = {} + if content_type is not None: + extra_args['ContentType'] = content_type + + if extra_args: + self.s3_client.upload_fileobj( + fileobj, bucket_name, key, ExtraArgs=extra_args + ) + else: + self.s3_client.upload_fileobj(fileobj, bucket_name, key) + except Exception as e: + raise Exception(f'Error uploading large file to S3: {str(e)}') + + def save_small_file( + self, + file_content: bytes, + bucket_name: str, + key: str, + content_type: Optional[str] = None, + ) -> None: + """S3 implementation of small file upload using put_object.""" + try: + kwargs = {'Bucket': bucket_name, 'Key': key, 'Body': file_content} + if content_type is not None: + kwargs['ContentType'] = content_type + + self.s3_client.put_object(**kwargs) + except Exception as e: + raise Exception(f'Error uploading small 
file to S3: {str(e)}') + + def get_bucket_key(self, value: str): + match = re.match(r's3://([^/]+)/(.+)', value) + bucket_name = match.group(1) + key = match.group(2) + return bucket_name, key + + def generate_presigned_url( + self, bucket_name: str, key: str, type: str = 'get_object', expiresIn: int = 300 + ) -> str: + """ + Generate a presigned URL for an S3 object. + + Args: + bucket_name (str): Name of the S3 bucket + key (str): Key of the object in the bucket + type (str): Type of operation (e.g., 'get_object', 'put_object') + expiresIn (int): Expiration time in seconds (default: 300) + + Returns: + str: Presigned URL + + Raises: + Exception: If URL generation fails + """ + try: + presigned_url = self.s3_client.generate_presigned_url( + type, + Params={'Bucket': bucket_name, 'Key': key}, + ExpiresIn=expiresIn, + ) + return presigned_url + except Exception as e: + raise Exception(f'Error generating presigned URL for S3: {str(e)}') + + def list_files( + self, bucket_name: str, prefix: str, page_size: int = 50, page_number: int = 1 + ) -> Tuple[List[str], bool]: + """ + List files in an S3 bucket with prefix filtering and pagination. + Optimized to use server-side pagination efficiently. + + Args: + bucket_name (str): Name of the S3 bucket + prefix (str): Prefix to filter files + page_size (int): Number of files per page (default: 50) + page_number (int): Which page to retrieve, 1-based (default: 1) + + Returns: + Tuple[List[str], bool]: (list of object keys, has_next_page) + + Raises: + Exception: If listing fails + """ + try: + if page_number < 1: + raise ValueError('page_number must be >= 1') + if page_size < 1: + raise ValueError('page_size must be >= 1') + + paginator = self.s3_client.get_paginator('list_objects_v2') + + # Create a flat, memory-efficient iterator over all objects + page_iterator = paginator.paginate(Bucket=bucket_name, Prefix=prefix) + item_iterator = page_iterator.search('Contents') + + # Calculate the start and end index for the desired page slice + start_index = (page_number - 1) * page_size + # Fetch one extra item to check if there is a next page + end_index = start_index + page_size + 1 + + # Use islice to efficiently advance the iterator and get only our page + page_slice = islice(item_iterator, start_index, end_index) + + # Extract the 'Key' from the dictionaries in the slice + file_keys = [item['Key'] for item in page_slice if item is not None] + + # Determine if there's a next page + has_next_page = len(file_keys) > page_size + + # Return only the requested page size + if has_next_page: + return file_keys[:page_size], True + else: + return file_keys, False + + except ClientError as e: + if e.response['Error']['Code'] == 'NoSuchBucket': + raise Exception(f'Bucket {bucket_name} not found') + else: + raise Exception(f'Error listing files from S3: {str(e)}') + except Exception as e: + raise Exception(f'Error listing files from S3: {str(e)}') + + def delete_file(self, bucket_name: str, file_path: str) -> None: + """ + Delete file from S3 bucket + Args: + bucket_name: Name of the S3 bucket + file_path: Path to the file in bucket + """ + try: + self.s3_client.delete_object(Bucket=bucket_name, Key=file_path) + except Exception as e: + raise Exception(f'Error deleting file from S3: {str(e)}') diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/aws/sqs.py b/wavefront/server/packages/flo_cloud/flo_cloud/aws/sqs.py new file mode 100644 index 00000000..a4a2f242 --- /dev/null +++ b/wavefront/server/packages/flo_cloud/flo_cloud/aws/sqs.py @@ -0,0 +1,80 @@ +import 
os +from typing import List +import boto3 +import json +from .._types import MessageQueue, MessageQueueDict + +queue_url = os.getenv('QUEUE_URL') + + +class SQSQueue(MessageQueue): + def __init__(self): + self.sqs_client = boto3.client('sqs') + self.queue_url = queue_url + + def receive_messages( + self, max_messages=10, wait_time_sec=20, **kwargs + ) -> List[MessageQueueDict] | None: + try: + response = self.sqs_client.receive_message( + QueueUrl=self.queue_url, + MaxNumberOfMessages=max_messages, + WaitTimeSeconds=wait_time_sec, + VisibilityTimeout=kwargs.get('visibility_timeout', 300), + ) + + if 'Messages' not in response: + return None + + messages = [] + for message in response['Messages']: + body = json.loads(message['Body']) + messages.append( + MessageQueueDict( + body=body, + ack_id=message['ReceiptHandle'], + id=message['MessageId'], + ) + ) + + return messages + except Exception as e: + raise e + + def delete_message(self, ack_id: str): + try: + self.sqs_client.delete_message( + QueueUrl=self.queue_url, ReceiptHandle=ack_id + ) + except Exception as e: + raise e + + def add_message( + self, message_body: dict, topic_name_or_queue_url: str = None, **attributes + ) -> str: + try: + # Use provided queue_url or fall back to default + target_queue_url = topic_name_or_queue_url or self.queue_url + + message_data = json.dumps(message_body) + + # Prepare message parameters + message_params = {'QueueUrl': target_queue_url, 'MessageBody': message_data} + + # Add message attributes if provided + if attributes: + message_attributes = {} + for key, value in attributes.items(): + message_attributes[key] = { + 'StringValue': str(value), + 'DataType': 'String', + } + message_params['MessageAttributes'] = message_attributes + + # Send the message + response = self.sqs_client.send_message(**message_params) + + return response['MessageId'] + + except Exception as e: + raise e diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/azure/__init__.py b/wavefront/server/packages/flo_cloud/flo_cloud/azure/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/cloud_storage.py b/wavefront/server/packages/flo_cloud/flo_cloud/cloud_storage.py new file mode 100644 index 00000000..cc703b71 --- /dev/null +++ b/wavefront/server/packages/flo_cloud/flo_cloud/cloud_storage.py @@ -0,0 +1,189 @@ +from io import BytesIO +from typing import Union, List, Tuple, Optional +from .aws.s3 import S3Storage +from .gcp.gcs import GCSStorage +from ._types import CloudStorageHandler, CloudProvider + + +class CloudStorageFactory: + """Factory class to create appropriate cloud storage handler""" + + @staticmethod + def get_handler( + provider: Union[str, CloudProvider], **credentials + ) -> CloudStorageHandler: + """ + Create and return appropriate cloud storage handler based on provider + + Args: + provider: Cloud provider (either string or CloudProvider enum) + **credentials: Keyword arguments for provider-specific credentials + + Returns: + CloudStorageHandler: Appropriate handler instance + + Raises: + ValueError: If provider is not supported + """ + if isinstance(provider, str): + provider = CloudProvider(provider.lower()) + + if provider == CloudProvider.AWS: + return S3Storage() + elif provider == CloudProvider.GCP: + return GCSStorage() + else: + raise ValueError(f'Unsupported cloud provider: {provider}') + + +class CloudStorageManager: + """Manager class to handle cloud storage operations""" + + def __init__(self, provider: Union[str, CloudProvider], 
**credentials):
+        """
+        Initialize storage manager with specified provider
+
+        Args:
+            provider: Cloud provider (either string or CloudProvider enum)
+            **credentials: Provider-specific credentials
+        """
+        self.handler = CloudStorageFactory.get_handler(provider, **credentials)
+        if isinstance(provider, str):
+            provider = CloudProvider(provider.lower())
+        self.provider = provider
+
+    def _convert_to_valid_type(self, type: str) -> str:
+        """
+        Convert a generic type (get, put, post) to the provider-specific operation string.
+
+        Args:
+            type: The generic operation type ('get', 'put', 'post')
+
+        Returns:
+            str: The provider-specific operation string
+
+        Raises:
+            ValueError: If the type or provider is not supported
+        """
+        type = type.lower()
+        if self.provider == CloudProvider.AWS:
+            if type in ('get', 'get_object'):
+                return 'get_object'
+            elif type in ('put', 'put_object'):
+                return 'put_object'
+            elif type in ('post', 'post_object'):
+                return 'post_object'
+        elif self.provider == CloudProvider.GCP:
+            if type in ('get', 'get_object'):
+                return 'GET'
+            elif type in ('put', 'put_object'):
+                return 'PUT'
+            elif type in ('post', 'post_object'):
+                return 'POST'
+        raise ValueError(f"Unsupported type '{type}' for provider '{self.provider}'")
+
+    def read_file(self, bucket_name: str, file_path: str) -> bytes:
+        """
+        Read file from cloud storage
+
+        Args:
+            bucket_name: Name of the bucket
+            file_path: Path to the file in bucket
+
+        Returns:
+            bytes: File contents
+        """
+        return self.handler.get_file(bucket_name, file_path)
+
+    def save_large_file(
+        self,
+        data: bytes,
+        bucket_name: str,
+        key: str,
+        content_type: Optional[str] = None,
+    ) -> None:
+        """
+        Save large file to cloud storage using streaming/multipart upload.
+
+        Args:
+            data: File data in bytes
+            bucket_name: Name of the storage bucket
+            key: Object key/path for the file in the bucket
+            content_type: MIME type of the file (e.g., 'image/jpeg', 'application/pdf').
+                If None, the cloud provider will use its default.
+
+        Returns:
+            None
+        """
+        self.handler.save_large_file(data, bucket_name, key, content_type)
+
+    def save_small_file(
+        self,
+        file_content: bytes,
+        bucket_name: str,
+        key: str,
+        content_type: Optional[str] = None,
+    ) -> None:
+        """
+        Save small file to cloud storage using direct upload.
+
+        Args:
+            file_content: File content in bytes
+            bucket_name: Name of the storage bucket
+            key: Object key/path for the file in the bucket
+            content_type: MIME type of the file (e.g., 'image/jpeg', 'application/pdf').
+                If None, the cloud provider will use its default.
+
+        Returns:
+            None
+        """
+        self.handler.save_small_file(file_content, bucket_name, key, content_type)
+
+    def file_protocol(self) -> Optional[str]:
+        # Compare against the enum, not raw strings: self.provider is always a
+        # CloudProvider after __init__, so string comparisons would never match.
+        if self.provider == CloudProvider.AWS:
+            return 's3'
+        if self.provider == CloudProvider.GCP:
+            return 'gs'
+        return None
+
+    def get_bucket_key(self, value) -> Tuple[str, str]:
+        return self.handler.get_bucket_key(value)
+
+    def generate_presigned_url(
+        self, bucket_name: str, key: str, type: str, expiresIn: int = 300
+    ) -> str:
+        try:
+            valid_type = self._convert_to_valid_type(type)
+            return self.handler.generate_presigned_url(
+                bucket_name, key, valid_type, expiresIn
+            )
+        except Exception as e:
+            raise e
+
+    def list_files(
+        self, bucket_name: str, prefix: str, page_size: int = 50, page_number: int = 1
+    ) -> Tuple[List[str], bool]:
+        """
+        List files in cloud storage bucket with prefix filtering and pagination.
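+
+        Example (illustrative; the bucket name and prefix are placeholders):
+
+            manager = CloudStorageManager('aws')
+            keys, has_next = manager.list_files('example-bucket', 'uploads/', page_size=25)
+            if has_next:
+                more_keys, _ = manager.list_files(
+                    'example-bucket', 'uploads/', page_size=25, page_number=2
+                )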
+ + Args: + bucket_name (str): Name of the bucket + prefix (str): Prefix to filter files + page_size (int): Number of files per page (default: 50) + page_number (int): Which page to retrieve, 1-based (default: 1) + + Returns: + Tuple[List[str], bool]: (list of file keys/paths, has_next_page) + + Raises: + Exception: If listing fails + """ + return self.handler.list_files(bucket_name, prefix, page_size, page_number) + + def delete_file(self, bucket_name: str, file_path: str) -> None: + """ + Delete file from cloud storage + Args: + bucket_name: Name of the bucket + file_path: Path to the file in bucket + """ + return self.handler.delete_file(bucket_name, file_path) diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/exceptions.py b/wavefront/server/packages/flo_cloud/flo_cloud/exceptions.py new file mode 100644 index 00000000..15afd3f0 --- /dev/null +++ b/wavefront/server/packages/flo_cloud/flo_cloud/exceptions.py @@ -0,0 +1,20 @@ +""" +Custom exceptions for flo_cloud package +""" + + +class CloudStorageError(Exception): + """Base exception for cloud storage operations""" + + pass + + +class CloudStorageFileNotFoundError(CloudStorageError): + """Exception raised when a file is not found in cloud storage""" + + def __init__(self, bucket_name: str, file_path: str, message: str = None): + self.bucket_name = bucket_name + self.file_path = file_path + if message is None: + message = f"File not found in bucket '{bucket_name}' at path '{file_path}'" + super().__init__(message) diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/gcp/bigquery.py b/wavefront/server/packages/flo_cloud/flo_cloud/gcp/bigquery.py new file mode 100644 index 00000000..219388c8 --- /dev/null +++ b/wavefront/server/packages/flo_cloud/flo_cloud/gcp/bigquery.py @@ -0,0 +1,1684 @@ +import os +import string +import logging +import asyncio +from google.oauth2 import service_account +from typing import List, Dict, Any, Optional, Union +from datetime import datetime + +from google.cloud import bigquery +from google.cloud.bigquery import ( + Dataset, + Table, + SchemaField, + QueryJob, + LoadJob, + CopyJob, + ExtractJob, + TimePartitioning, + DestinationFormat, + SourceFormat, + WriteDisposition, + CreateDisposition, + DatasetReference, + TableReference, +) +from google.cloud.bigquery.job import ( + QueryJobConfig, + LoadJobConfig, + CopyJobConfig, + ExtractJobConfig, +) +from google.cloud.bigquery.table import TableListItem +from google.cloud.exceptions import GoogleCloudError + + +logger = logging.getLogger(__name__) + + +class BigQueryClient: + """ + Comprehensive BigQuery client using google-cloud-bigquery library. + Provides all essential operations for Google BigQuery. + """ + + def __init__( + self, + project_id: Optional[str] = None, + location: Optional[str] = None, + credentials_path: Optional[str] = None, + credentials_json: Optional[dict] = None, + timeout: int = 300, + **kwargs, + ): + """ + Initialize BigQuery client with connection parameters. 
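+
+        Example (illustrative; the project ID and key path are placeholders):
+
+            client = BigQueryClient(
+                project_id='example-project',
+                location='US',
+                credentials_path='/path/to/service-account.json',
+            )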
+
+        Args:
+            project_id: Google Cloud project ID
+            location: BigQuery dataset location (e.g., 'US', 'EU', 'asia-northeast1')
+            credentials_path: Path to service account JSON file
+            credentials_json: Service account info as a dict
+            timeout: Query timeout in seconds
+            **kwargs: Additional client parameters
+        """
+        self.project_id = project_id or os.getenv('GOOGLE_CLOUD_PROJECT')
+        self.location = location or os.getenv('BIGQUERY_LOCATION', 'asia-south1')
+        self.timeout = timeout
+        self.client_params = kwargs
+
+        if not self.project_id:
+            raise ValueError(
+                'Project ID must be provided via parameter or GOOGLE_CLOUD_PROJECT environment variable'
+            )
+
+        # Initialize credentials
+        credentials = None
+        if credentials_path:
+            credentials = service_account.Credentials.from_service_account_file(
+                credentials_path
+            )
+        elif credentials_json:
+            credentials = service_account.Credentials.from_service_account_info(
+                credentials_json
+            )
+
+        # Initialize BigQuery client
+        if credentials:
+            self.client = bigquery.Client(
+                project=self.project_id, credentials=credentials, **self.client_params
+            )
+        else:
+            # Falls back to default credentials (application default credentials)
+            self.client = bigquery.Client(project=self.project_id, **self.client_params)
+
+    def _get_dataset_ref(self, dataset_id: str) -> DatasetReference:
+        """Get dataset reference."""
+        return DatasetReference(self.project_id, dataset_id)
+
+    def _get_table_ref(self, dataset_id: str, table_id: str) -> TableReference:
+        """Get table reference."""
+        dataset_ref = self._get_dataset_ref(dataset_id)
+        return TableReference(dataset_ref, table_id)
+
+    def _get_query_params(self, params: Optional[dict] = None):
+        """Get query parameters."""
+        query_params = []
+        for key, value in params.items():
+            if isinstance(value, str):
+                query_params.append(bigquery.ScalarQueryParameter(key, 'STRING', value))
+            elif isinstance(value, bool):
+                # Check bool before int: bool is a subclass of int in Python,
+                # so the INT64 branch would otherwise shadow this one.
+                query_params.append(bigquery.ScalarQueryParameter(key, 'BOOL', value))
+            elif isinstance(value, int):
+                query_params.append(bigquery.ScalarQueryParameter(key, 'INT64', value))
+            elif isinstance(value, float):
+                query_params.append(
+                    bigquery.ScalarQueryParameter(key, 'FLOAT64', value)
+                )
+            elif isinstance(value, datetime):
+                query_params.append(
+                    bigquery.ScalarQueryParameter(key, 'TIMESTAMP', value)
+                )
+            else:
+                query_params.append(
+                    bigquery.ScalarQueryParameter(key, 'STRING', str(value))
+                )
+        return query_params
+
+    async def execute_query(
+        self,
+        query: str,
+        use_legacy_sql: bool = False,
+        dry_run: bool = False,
+        params: Optional[dict] = None,
+        **kwargs,
+    ) -> QueryJob:
+        """
+        Execute a BigQuery SQL query.
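+
+        Example (illustrative; the table name is a placeholder, and the call
+        must run inside an event loop because this method is async):
+
+            job = await client.execute_query(
+                'SELECT COUNT(*) FROM `example-project.analytics.events` WHERE day = @day',
+                params={'day': '2024-01-01'},
+            )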
+ + Args: + query: SQL query to execute + use_legacy_sql: Whether to use legacy SQL (default: False) + dry_run: Whether to perform a dry run (default: False) + **kwargs: Additional query configuration parameters + + Returns: + QueryJob object + """ + try: + job_config = QueryJobConfig( + use_legacy_sql=use_legacy_sql, dry_run=dry_run, **kwargs + ) + if params: + job_config.query_parameters = self._get_query_params(params) + + # Run the blocking query operation in a thread pool + query_job = await asyncio.to_thread( + self.client.query, query, job_config=job_config + ) + + if not dry_run: + # Run the blocking result() call in a thread pool + await asyncio.to_thread(query_job.result, timeout=self.timeout) + + return query_job + + except GoogleCloudError as e: + logger.error(f'BigQuery query execution error: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error executing BigQuery query: {e}') + raise + + def execute_query_to_dataframe( + self, query: str, use_legacy_sql: bool = False, **kwargs + ): + """ + Execute a query and return results as a pandas DataFrame. + + Args: + query: SQL query to execute + use_legacy_sql: Whether to use legacy SQL + **kwargs: Additional query configuration parameters + + Returns: + pandas DataFrame with query results + """ + try: + job_config = QueryJobConfig(use_legacy_sql=use_legacy_sql, **kwargs) + + df = self.client.query(query, job_config=job_config).to_dataframe() + return df + + except ImportError: + raise ImportError( + 'pandas is required for this method. Install with: pip install pandas' + ) + except GoogleCloudError as e: + logger.error(f'BigQuery query execution error: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error executing BigQuery query: {e}') + raise + + def execute_query_to_dict( + self, + projection: str = '*', + table_prefix: str = '', + table_names: List[str] = [], + where_clause: str = 'true', + join_query: Optional[str] = None, + params: Optional[dict] = None, + limit: int = 10, + offset: int = 0, + use_legacy_sql: bool = False, + order_by: Optional[str] = None, + group_by: Optional[str] = None, + **kwargs, + ) -> Union[List[Dict[str, Any]], List[Dict[str, Any]]]: + """ + Execute a query and return results as a list of dictionaries or structured list for joins. 
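+
+        Example (illustrative; the dataset prefix and table name are placeholders;
+        the single-table path aliases the table as 'a'):
+
+            rows = client.execute_query_to_dict(
+                projection='a.id, a.name',
+                table_prefix='example-project.analytics.',
+                table_names=['users'],
+                where_clause='a.active = true',
+                limit=5,
+            )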
+ + Args: + projection: Projection of the query + table_prefix: Prefix for table names + table_names: List of table names + where_clause: Where clause of the query + join_query: Join query string (optional) + params: Query parameters (optional) + limit: Maximum number of rows to return + offset: Number of rows to skip + use_legacy_sql: Whether to use legacy SQL + **kwargs: Additional query configuration parameters + + Returns: + If join_query is provided: + List of dictionaries, each containing main table fields and child table objects + Otherwise: + List of dictionaries containing query results + """ + + if join_query: + query = self.__get_join_query( + join_query, + table_names, + table_prefix, + projection, + where_clause, + limit, + offset, + order_by, + group_by=group_by, + ) + else: + group_by_clause = f'GROUP BY {group_by}' if group_by else '' + order_by_clause = f'ORDER BY {order_by}' if order_by else '' + query = f'SELECT {projection} FROM `{table_prefix}{table_names[0]}` AS a WHERE {where_clause} {group_by_clause} {order_by_clause} LIMIT {limit} OFFSET {offset}' + + try: + job_config = QueryJobConfig(use_legacy_sql=use_legacy_sql, **kwargs) + if params: + job_config.query_parameters = self._get_query_params(params) + + query_job = self.client.query(query, job_config=job_config) + results = query_job.result(timeout=self.timeout) + + return [dict(row.items()) for row in results] + + except GoogleCloudError as e: + logger.error(f'BigQuery query execution error: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error executing BigQuery query: {e}') + raise + + def create_dataset( + self, + dataset_id: str, + description: Optional[str] = None, + location: Optional[str] = None, + **kwargs, + ) -> Dataset: + """ + Create a new BigQuery dataset. + + Args: + dataset_id: Dataset ID + description: Dataset description + location: Dataset location + **kwargs: Additional dataset configuration parameters + + Returns: + Created Dataset object + """ + try: + dataset_ref = self._get_dataset_ref(dataset_id) + dataset = Dataset(dataset_ref) + + if description: + dataset.description = description + if location: + dataset.location = location + + # Set additional properties + for key, value in kwargs.items(): + if hasattr(dataset, key): + setattr(dataset, key, value) + + dataset = self.client.create_dataset(dataset, timeout=self.timeout) + logger.info(f'Created dataset {dataset_id}') + return dataset + + except GoogleCloudError as e: + logger.error(f'Error creating dataset {dataset_id}: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error creating dataset {dataset_id}: {e}') + raise + + def delete_dataset( + self, dataset_id: str, delete_contents: bool = False, not_found_ok: bool = False + ) -> bool: + """ + Delete a BigQuery dataset. 
+ + Args: + dataset_id: Dataset ID + delete_contents: Whether to delete all tables in the dataset + not_found_ok: Whether to ignore if dataset doesn't exist + + Returns: + True if dataset was deleted, False otherwise + """ + try: + dataset_ref = self._get_dataset_ref(dataset_id) + self.client.delete_dataset( + dataset_ref, delete_contents=delete_contents, not_found_ok=not_found_ok + ) + logger.info(f'Deleted dataset {dataset_id}') + return True + + except GoogleCloudError as e: + if not_found_ok and 'Not found' in str(e): + return False + logger.error(f'Error deleting dataset {dataset_id}: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error deleting dataset {dataset_id}: {e}') + raise + + def list_datasets(self, **kwargs) -> List[Dataset]: + """ + List all datasets in the project. + + Args: + **kwargs: Additional list parameters + + Returns: + List of Dataset objects + """ + try: + datasets = list(self.client.list_datasets(**kwargs)) + return datasets + + except GoogleCloudError as e: + logger.error(f'Error listing datasets: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error listing datasets: {e}') + raise + + def create_table( + self, + dataset_id: str, + table_id: str, + schema: Optional[List[SchemaField]] = None, + description: Optional[str] = None, + time_partitioning: Optional[TimePartitioning] = None, + clustering_fields: Optional[List[str]] = None, + **kwargs, + ) -> Table: + """ + Create a new BigQuery table. + + Args: + dataset_id: Dataset ID + table_id: Table ID + schema: Table schema (list of SchemaField objects) + description: Table description + time_partitioning: Time partitioning configuration + clustering_fields: List of field names for clustering + **kwargs: Additional table configuration parameters + + Returns: + Created Table object + """ + try: + table_ref = self._get_table_ref(dataset_id, table_id) + table = Table(table_ref, schema=schema) + + if description: + table.description = description + if time_partitioning: + table.time_partitioning = time_partitioning + if clustering_fields: + table.clustering_fields = clustering_fields + + # Set additional properties + for key, value in kwargs.items(): + if hasattr(table, key): + setattr(table, key, value) + + table = self.client.create_table(table, timeout=self.timeout) + logger.info(f'Created table {dataset_id}.{table_id}') + return table + + except GoogleCloudError as e: + logger.error(f'Error creating table {dataset_id}.{table_id}: {e}') + raise + except Exception as e: + logger.error( + f'Unexpected error creating table {dataset_id}.{table_id}: {e}' + ) + raise + + def delete_table( + self, dataset_id: str, table_id: str, not_found_ok: bool = False + ) -> bool: + """ + Delete a BigQuery table. + + Args: + dataset_id: Dataset ID + table_id: Table ID + not_found_ok: Whether to ignore if table doesn't exist + + Returns: + True if table was deleted, False otherwise + """ + try: + table_ref = self._get_table_ref(dataset_id, table_id) + self.client.delete_table(table_ref, not_found_ok=not_found_ok) + logger.info(f'Deleted table {dataset_id}.{table_id}') + return True + + except GoogleCloudError as e: + if not_found_ok and 'Not found' in str(e): + return False + logger.error(f'Error deleting table {dataset_id}.{table_id}: {e}') + raise + except Exception as e: + logger.error( + f'Unexpected error deleting table {dataset_id}.{table_id}: {e}' + ) + raise + + def list_tables(self, dataset_id: str, **kwargs) -> List[TableListItem]: + """ + List all tables in a dataset. 
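+
+        Example (illustrative; the dataset name is a placeholder):
+
+            for table in client.list_tables('analytics'):
+                print(table.table_id)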
+ + Args: + dataset_id: Dataset ID + **kwargs: Additional list parameters + + Returns: + List of Table objects + """ + try: + dataset_ref = self._get_dataset_ref(dataset_id) + tables = list(self.client.list_tables(dataset_ref, **kwargs)) + return tables + + except GoogleCloudError as e: + logger.error(f'Error listing tables in dataset {dataset_id}: {e}') + raise + except Exception as e: + logger.error( + f'Unexpected error listing tables in dataset {dataset_id}: {e}' + ) + raise + + def get_table(self, dataset_id: str, table_id: str) -> Table: + """ + Get a BigQuery table. + + Args: + dataset_id: Dataset ID + table_id: Table ID + + Returns: + Table object + """ + try: + table_ref = self._get_table_ref(dataset_id, table_id) + table = self.client.get_table(table_ref) + return table + + except GoogleCloudError as e: + logger.error(f'Error getting table {dataset_id}.{table_id}: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error getting table {dataset_id}.{table_id}: {e}') + raise + + def update_table(self, dataset_id: str, table_id: str, **kwargs) -> Table: + """ + Update a BigQuery table. + + Args: + dataset_id: Dataset ID + table_id: Table ID + **kwargs: Table properties to update + + Returns: + Updated Table object + """ + try: + table_ref = self._get_table_ref(dataset_id, table_id) + table = self.client.get_table(table_ref) + + # Update table properties + for key, value in kwargs.items(): + if hasattr(table, key): + setattr(table, key, value) + + updated_table = self.client.update_table(table, fields=list(kwargs.keys())) + logger.info(f'Updated table {dataset_id}.{table_id}') + return updated_table + + except GoogleCloudError as e: + logger.error(f'Error updating table {dataset_id}.{table_id}: {e}') + raise + except Exception as e: + logger.error( + f'Unexpected error updating table {dataset_id}.{table_id}: {e}' + ) + raise + + def load_table_from_dataframe( + self, + dataframe, + dataset_id: str, + table_id: str, + write_disposition: WriteDisposition = WriteDisposition.WRITE_APPEND, + create_disposition: CreateDisposition = CreateDisposition.CREATE_IF_NEEDED, + schema: Optional[List[SchemaField]] = None, + **kwargs, + ) -> LoadJob: + """ + Load data from a pandas DataFrame into a BigQuery table. + + Args: + dataframe: pandas DataFrame to load + dataset_id: Dataset ID + table_id: Table ID + write_disposition: Write disposition (WRITE_TRUNCATE, WRITE_APPEND, WRITE_EMPTY) + create_disposition: Create disposition (CREATE_IF_NEEDED, CREATE_NEVER) + schema: Table schema (optional) + **kwargs: Additional load configuration parameters + + Returns: + LoadJob object + """ + try: + table_ref = self._get_table_ref(dataset_id, table_id) + + job_config = LoadJobConfig( + write_disposition=write_disposition, + create_disposition=create_disposition, + schema=schema, + **kwargs, + ) + + load_job = self.client.load_table_from_dataframe( + dataframe, table_ref, job_config=job_config + ) + load_job.result(timeout=self.timeout) + + logger.info(f'Loaded {len(dataframe)} rows into {dataset_id}.{table_id}') + return load_job + + except ImportError: + raise ImportError( + 'pandas is required for this method. 
Install with: pip install pandas' + ) + except GoogleCloudError as e: + logger.error(f'Error loading data into {dataset_id}.{table_id}: {e}') + raise + except Exception as e: + logger.error( + f'Unexpected error loading data into {dataset_id}.{table_id}: {e}' + ) + raise + + def load_table_from_file( + self, + source_file: str, + dataset_id: str, + table_id: str, + source_format: SourceFormat = SourceFormat.CSV, + write_disposition: WriteDisposition = WriteDisposition.WRITE_APPEND, + create_disposition: CreateDisposition = CreateDisposition.CREATE_IF_NEEDED, + schema: Optional[List[SchemaField]] = None, + **kwargs, + ) -> LoadJob: + """ + Load data from a file into a BigQuery table. + + Args: + source_file: Path to source file + dataset_id: Dataset ID + table_id: Table ID + source_format: Source file format (CSV, JSON, AVRO, PARQUET, etc.) + write_disposition: Write disposition + create_disposition: Create disposition + schema: Table schema (optional) + **kwargs: Additional load configuration parameters + + Returns: + LoadJob object + """ + try: + table_ref = self._get_table_ref(dataset_id, table_id) + + job_config = LoadJobConfig( + source_format=source_format, + write_disposition=write_disposition, + create_disposition=create_disposition, + schema=schema, + **kwargs, + ) + + with open(source_file, 'rb') as source_file_obj: + load_job = self.client.load_table_from_file( + source_file_obj, table_ref, job_config=job_config + ) + load_job.result(timeout=self.timeout) + + logger.info(f'Loaded data from {source_file} into {dataset_id}.{table_id}') + return load_job + + except GoogleCloudError as e: + logger.error( + f'Error loading data from {source_file} into {dataset_id}.{table_id}: {e}' + ) + raise + except Exception as e: + logger.error( + f'Unexpected error loading data from {source_file} into {dataset_id}.{table_id}: {e}' + ) + raise + + def load_table_from_uri( + self, + source_uris: Union[str, List[str]], + dataset_id: str, + table_id: str, + source_format: SourceFormat = SourceFormat.CSV, + write_disposition: WriteDisposition = WriteDisposition.WRITE_APPEND, + create_disposition: CreateDisposition = CreateDisposition.CREATE_IF_NEEDED, + schema: Optional[List[SchemaField]] = None, + **kwargs, + ) -> LoadJob: + """ + Load data from Google Cloud Storage URIs into a BigQuery table. 
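+
+        Example (illustrative; the URI, dataset, and table names are placeholders):
+
+            job = client.load_table_from_uri(
+                'gs://example-bucket/exports/events-*.parquet',
+                dataset_id='analytics',
+                table_id='events',
+                source_format=SourceFormat.PARQUET,
+            )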
+ + Args: + source_uris: GCS URI(s) (gs://bucket/path/to/file) + dataset_id: Dataset ID + table_id: Table ID + source_format: Source file format + write_disposition: Write disposition + create_disposition: Create disposition + schema: Table schema (optional) + **kwargs: Additional load configuration parameters + + Returns: + LoadJob object + """ + try: + table_ref = self._get_table_ref(dataset_id, table_id) + + job_config = LoadJobConfig( + source_format=source_format, + write_disposition=write_disposition, + create_disposition=create_disposition, + schema=schema, + **kwargs, + ) + + load_job = self.client.load_table_from_uri( + source_uris, table_ref, job_config=job_config + ) + load_job.result(timeout=self.timeout) + + logger.info(f'Loaded data from {source_uris} into {dataset_id}.{table_id}') + return load_job + + except GoogleCloudError as e: + logger.error( + f'Error loading data from {source_uris} into {dataset_id}.{table_id}: {e}' + ) + raise + except Exception as e: + logger.error( + f'Unexpected error loading data from {source_uris} into {dataset_id}.{table_id}: {e}' + ) + raise + + def copy_table( + self, + source_dataset_id: str, + source_table_id: str, + destination_dataset_id: str, + destination_table_id: str, + write_disposition: WriteDisposition = WriteDisposition.WRITE_TRUNCATE, + create_disposition: CreateDisposition = CreateDisposition.CREATE_IF_NEEDED, + **kwargs, + ) -> CopyJob: + """ + Copy a BigQuery table. + + Args: + source_dataset_id: Source dataset ID + source_table_id: Source table ID + destination_dataset_id: Destination dataset ID + destination_table_id: Destination table ID + write_disposition: Write disposition + create_disposition: Create disposition + **kwargs: Additional copy configuration parameters + + Returns: + CopyJob object + """ + try: + source_table_ref = self._get_table_ref(source_dataset_id, source_table_id) + destination_table_ref = self._get_table_ref( + destination_dataset_id, destination_table_id + ) + + job_config = CopyJobConfig( + write_disposition=write_disposition, + create_disposition=create_disposition, + **kwargs, + ) + + copy_job = self.client.copy_table( + source_table_ref, destination_table_ref, job_config=job_config + ) + copy_job.result(timeout=self.timeout) + + logger.info( + f'Copied table {source_dataset_id}.{source_table_id} to {destination_dataset_id}.{destination_table_id}' + ) + return copy_job + + except GoogleCloudError as e: + logger.error( + f'Error copying table {source_dataset_id}.{source_table_id}: {e}' + ) + raise + except Exception as e: + logger.error( + f'Unexpected error copying table {source_dataset_id}.{source_table_id}: {e}' + ) + raise + + def extract_table_to_gcs( + self, + dataset_id: str, + table_id: str, + destination_uris: Union[str, List[str]], + destination_format: DestinationFormat = DestinationFormat.CSV, + **kwargs, + ) -> ExtractJob: + """ + Extract a BigQuery table to Google Cloud Storage. 
+ + Args: + dataset_id: Dataset ID + table_id: Table ID + destination_uris: GCS destination URI(s) (gs://bucket/path/to/file) + destination_format: Destination format (CSV, JSON, AVRO, PARQUET) + **kwargs: Additional extract configuration parameters + + Returns: + ExtractJob object + """ + try: + table_ref = self._get_table_ref(dataset_id, table_id) + + job_config = ExtractJobConfig( + destination_format=destination_format, **kwargs + ) + + extract_job = self.client.extract_table( + table_ref, destination_uris, job_config=job_config + ) + extract_job.result(timeout=self.timeout) + + logger.info( + f'Extracted table {dataset_id}.{table_id} to {destination_uris}' + ) + return extract_job + + except GoogleCloudError as e: + logger.error(f'Error extracting table {dataset_id}.{table_id}: {e}') + raise + except Exception as e: + logger.error( + f'Unexpected error extracting table {dataset_id}.{table_id}: {e}' + ) + raise + + def get_table_info(self, dataset_id: str, table_id: str) -> Dict[str, Any]: + """ + Get detailed information about a BigQuery table. + + Args: + dataset_id: Dataset ID + table_id: Table ID + + Returns: + Dictionary containing table information + """ + try: + table = self.get_table(dataset_id, table_id) + + info = { + 'table_id': table.table_id, + 'dataset_id': table.dataset_id, + 'project': table.project, + 'created': table.created, + 'modified': table.modified, + 'description': table.description, + 'num_rows': table.num_rows, + 'num_bytes': table.num_bytes, + 'schema': [field.to_api_repr() for field in table.schema] + if table.schema + else [], + 'time_partitioning': table.time_partitioning.to_api_repr() + if table.time_partitioning + else None, + 'clustering_fields': table.clustering_fields, + 'labels': table.labels, + 'view_query': table.view_query, + 'table_type': table.table_type, + } + + return info + + except GoogleCloudError as e: + logger.error(f'Error getting table info for {dataset_id}.{table_id}: {e}') + raise + except Exception as e: + logger.error( + f'Unexpected error getting table info for {dataset_id}.{table_id}: {e}' + ) + raise + + def get_table_size(self, dataset_id: str, table_id: str) -> Dict[str, Any]: + """ + Get table size information. + + Args: + dataset_id: Dataset ID + table_id: Table ID + + Returns: + Dictionary containing size information + """ + try: + table = self.get_table(dataset_id, table_id) + + size_info = { + 'table_id': table.table_id, + 'dataset_id': table.dataset_id, + 'num_rows': table.num_rows, + 'num_bytes': table.num_bytes, + 'num_megabytes': table.num_bytes / (1024 * 1024) + if table.num_bytes + else 0, + 'num_gigabytes': table.num_bytes / (1024 * 1024 * 1024) + if table.num_bytes + else 0, + } + + return size_info + + except GoogleCloudError as e: + logger.error(f'Error getting table size for {dataset_id}.{table_id}: {e}') + raise + except Exception as e: + logger.error( + f'Unexpected error getting table size for {dataset_id}.{table_id}: {e}' + ) + raise + + def get_job_info(self, job_id: str) -> Dict[str, Any]: + """ + Get information about a BigQuery job. 
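+
+        Example (illustrative; the job ID is a placeholder):
+
+            info = client.get_job_info('bquxjob_1234abcd_5678efgh')
+            print(info['state'], info['error_result'])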
+ + Args: + job_id: Job ID + + Returns: + Dictionary containing job information + """ + try: + job = self.client.get_job(job_id) + + info = { + 'job_id': job.job_id, + 'job_type': job.job_type, + 'state': job.state, + 'created': job.created, + 'started': job.started, + 'ended': job.ended, + 'error_result': job.error_result.to_api_repr() + if job.error_result + else None, + 'statistics': job.statistics.to_api_repr() if job.statistics else None, + } + + return info + + except GoogleCloudError as e: + logger.error(f'Error getting job info for {job_id}: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error getting job info for {job_id}: {e}') + raise + + def cancel_job(self, job_id: str) -> bool: + """ + Cancel a running BigQuery job. + + Args: + job_id: Job ID to cancel + + Returns: + True if job was cancelled, False otherwise + """ + try: + job = self.client.get_job(job_id) + if job.state == 'RUNNING': + job.cancel() + logger.info(f'Cancelled job {job_id}') + return True + else: + logger.info(f'Job {job_id} is not running (state: {job.state})') + return False + + except GoogleCloudError as e: + logger.error(f'Error cancelling job {job_id}: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error cancelling job {job_id}: {e}') + raise + + def list_jobs( + self, + state_filter: Optional[str] = None, + min_creation_time: Optional[datetime] = None, + max_creation_time: Optional[datetime] = None, + **kwargs, + ) -> List[Dict[str, Any]]: + """ + List BigQuery jobs. + + Args: + state_filter: Filter jobs by state (RUNNING, DONE, PENDING, etc.) + min_creation_time: Minimum creation time + max_creation_time: Maximum creation time + **kwargs: Additional list parameters + + Returns: + List of job information dictionaries + """ + try: + jobs = [] + for job in self.client.list_jobs(**kwargs): + # Apply filters + if state_filter and job.state != state_filter: + continue + if min_creation_time and job.created < min_creation_time: + continue + if max_creation_time and job.created > max_creation_time: + continue + + job_info = { + 'job_id': job.job_id, + 'job_type': job.job_type, + 'state': job.state, + 'created': job.created, + 'started': job.started, + 'ended': job.ended, + 'user_email': job.user_email, + } + jobs.append(job_info) + + return jobs + + except GoogleCloudError as e: + logger.error(f'Error listing jobs: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error listing jobs: {e}') + raise + + def get_query_plan( + self, query: str, use_legacy_sql: bool = False + ) -> Dict[str, Any]: + """ + Get the query execution plan for a BigQuery query. 
+ + Args: + query: SQL query + use_legacy_sql: Whether to use legacy SQL + + Returns: + Dictionary containing query plan information + """ + try: + job_config = QueryJobConfig(use_legacy_sql=use_legacy_sql, dry_run=True) + query_job = self.client.query(query, job_config=job_config) + + # Get query statistics + stats = query_job.statistics + + plan_info = { + 'total_bytes_processed': stats.total_bytes_processed, + 'total_bytes_billed': stats.total_bytes_billed, + 'billing_tier': stats.billing_tier, + 'cache_hit': stats.cache_hit, + 'num_dml_affected_rows': stats.num_dml_affected_rows, + 'ddl_operation_performed': stats.ddl_operation_performed, + 'ddl_target_table': stats.ddl_target_table.to_api_repr() + if stats.ddl_target_table + else None, + 'ddl_target_routine': stats.ddl_target_routine.to_api_repr() + if stats.ddl_target_routine + else None, + 'ddl_target_dataset': stats.ddl_target_dataset.to_api_repr() + if stats.ddl_target_dataset + else None, + 'ddl_target_view': stats.ddl_target_view.to_api_repr() + if stats.ddl_target_view + else None, + 'statement_type': stats.statement_type, + 'estimated_bytes_processed': stats.estimated_bytes_processed, + } + + return plan_info + + except GoogleCloudError as e: + logger.error(f'Error getting query plan: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error getting query plan: {e}') + raise + + async def test_connection(self) -> bool: + """ + Test the BigQuery connection by executing a simple query. + + Returns: + True if connection is successful, False otherwise + """ + try: + # Execute a simple query to test connection + query = 'SELECT 1 as test' + await self.execute_query(query) + logger.info('BigQuery connection test successful') + return True + + except Exception as e: + logger.error(f'BigQuery connection test failed: {e}') + return False + + def get_project_info(self) -> Dict[str, Any]: + """ + Get information about the current BigQuery project. + + Returns: + Dictionary containing project information + """ + try: + project = self.client.project + + info = { + 'project_id': project, + 'location': self.location, + 'client_location': self.client.location, + } + + return info + + except GoogleCloudError as e: + logger.error(f'Error getting project info: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error getting project info: {e}') + raise + + def create_view( + self, + dataset_id: str, + view_id: str, + query: str, + description: Optional[str] = None, + use_legacy_sql: bool = False, + **kwargs, + ) -> Table: + """ + Create a BigQuery view. 
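+
+        Example (illustrative; the names and query are placeholders):
+
+            view = client.create_view(
+                dataset_id='analytics',
+                view_id='active_users',
+                query='SELECT id, name FROM `example-project.analytics.users` WHERE active',
+            )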
+ + Args: + dataset_id: Dataset ID + view_id: View ID + query: SQL query for the view + description: View description + use_legacy_sql: Whether to use legacy SQL + **kwargs: Additional view configuration parameters + + Returns: + Created Table object (view) + """ + try: + table_ref = self._get_table_ref(dataset_id, view_id) + view = Table(table_ref) + + view.view_query = query + view.view_use_legacy_sql = use_legacy_sql + + if description: + view.description = description + + # Set additional properties + for key, value in kwargs.items(): + if hasattr(view, key): + setattr(view, key, value) + + view = self.client.create_table(view, timeout=self.timeout) + logger.info(f'Created view {dataset_id}.{view_id}') + return view + + except GoogleCloudError as e: + logger.error(f'Error creating view {dataset_id}.{view_id}: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error creating view {dataset_id}.{view_id}: {e}') + raise + + def create_materialized_view( + self, + dataset_id: str, + view_id: str, + query: str, + description: Optional[str] = None, + enable_refresh: bool = True, + refresh_interval_ms: int = 1800000, # 30 minutes + **kwargs, + ) -> Table: + """ + Create a BigQuery materialized view. + + Args: + dataset_id: Dataset ID + view_id: View ID + query: SQL query for the materialized view + description: View description + enable_refresh: Whether to enable automatic refresh + refresh_interval_ms: Refresh interval in milliseconds + **kwargs: Additional view configuration parameters + + Returns: + Created Table object (materialized view) + """ + try: + table_ref = self._get_table_ref(dataset_id, view_id) + view = Table(table_ref) + + view.view_query = query + view.materialized_view = { + 'enable_refresh': enable_refresh, + 'refresh_interval_ms': refresh_interval_ms, + } + + if description: + view.description = description + + # Set additional properties + for key, value in kwargs.items(): + if hasattr(view, key): + setattr(view, key, value) + + view = self.client.create_table(view, timeout=self.timeout) + logger.info(f'Created materialized view {dataset_id}.{view_id}') + return view + + except GoogleCloudError as e: + logger.error( + f'Error creating materialized view {dataset_id}.{view_id}: {e}' + ) + raise + except Exception as e: + logger.error( + f'Unexpected error creating materialized view {dataset_id}.{view_id}: {e}' + ) + raise + + async def refresh_materialized_view(self, dataset_id: str, view_id: str) -> bool: + """ + Refresh a BigQuery materialized view. 
+ + Args: + dataset_id: Dataset ID + view_id: View ID + + Returns: + True if refresh was successful, False otherwise + """ + try: + table_ref = self._get_table_ref(dataset_id, view_id) + table = self.client.get_table(table_ref) + + if not table.materialized_view: + logger.warning( + f'Table {dataset_id}.{view_id} is not a materialized view' + ) + return False + + # Create a query job to refresh the materialized view + query = f'SELECT * FROM `{self.project_id}.{dataset_id}.{view_id}` LIMIT 0' + await self.execute_query(query) + + logger.info(f'Refreshed materialized view {dataset_id}.{view_id}') + return True + + except GoogleCloudError as e: + logger.error( + f'Error refreshing materialized view {dataset_id}.{view_id}: {e}' + ) + raise + except Exception as e: + logger.error( + f'Unexpected error refreshing materialized view {dataset_id}.{view_id}: {e}' + ) + raise + + async def execute_batch_queries( + self, queries: List[str], use_legacy_sql: bool = False, **kwargs + ) -> List[QueryJob]: + """ + Execute multiple queries in sequence. + + Args: + queries: List of SQL queries to execute + use_legacy_sql: Whether to use legacy SQL + **kwargs: Additional query configuration parameters + + Returns: + List of QueryJob objects + """ + jobs = [] + for i, query in enumerate(queries): + try: + logger.info(f'Executing query {i+1}/{len(queries)}') + job = await self.execute_query( + query, use_legacy_sql=use_legacy_sql, **kwargs + ) + jobs.append(job) + except Exception as e: + logger.error(f'Error executing query {i+1}: {e}') + raise + + return jobs + + def get_table_schema(self, dataset_id: str, table_id: str) -> List[Dict[str, Any]]: + """ + Get the schema of a BigQuery table. + + Args: + dataset_id: Dataset ID + table_id: Table ID + + Returns: + List of field definitions as dictionaries + """ + try: + table = self.get_table(dataset_id, table_id) + schema = [] + + for field in table.schema: + field_info = { + 'name': field.name, + 'type': field.field_type, + 'mode': field.mode, + 'description': field.description, + } + + if field.fields: # Nested fields + field_info['fields'] = [ + { + 'name': nested_field.name, + 'type': nested_field.field_type, + 'mode': nested_field.mode, + 'description': nested_field.description, + } + for nested_field in field.fields + ] + + schema.append(field_info) + + return schema + + except GoogleCloudError as e: + logger.error(f'Error getting schema for {dataset_id}.{table_id}: {e}') + raise + except Exception as e: + logger.error( + f'Unexpected error getting schema for {dataset_id}.{table_id}: {e}' + ) + raise + + def estimate_query_cost( + self, query: str, use_legacy_sql: bool = False + ) -> Dict[str, Any]: + """ + Estimate the cost of running a query (dry run). 
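+
+        Example (illustrative; the table name is a placeholder, and the
+        returned figures are estimates from a dry run, not billed amounts):
+
+            cost = client.estimate_query_cost(
+                'SELECT * FROM `example-project.analytics.events`'
+            )
+            print(f"~${cost['estimated_cost_usd']:.4f} for {cost['tb_processed']:.3f} TB")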
+ + Args: + query: SQL query to estimate + use_legacy_sql: Whether to use legacy SQL + + Returns: + Dictionary containing cost estimation information + """ + try: + plan = self.get_query_plan(query, use_legacy_sql=use_legacy_sql) + + # BigQuery pricing (as of 2024): $5 per TB processed + bytes_processed = plan['total_bytes_processed'] + tb_processed = bytes_processed / (1024**4) # Convert to TB + estimated_cost = tb_processed * 5 # $5 per TB + + cost_info = { + 'bytes_processed': bytes_processed, + 'tb_processed': tb_processed, + 'estimated_cost_usd': estimated_cost, + 'billing_tier': plan['billing_tier'], + 'cache_hit': plan['cache_hit'], + } + + return cost_info + + except GoogleCloudError as e: + logger.error(f'Error estimating query cost: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error estimating query cost: {e}') + raise + + def get_table_partition_info( + self, dataset_id: str, table_id: str + ) -> Dict[str, Any]: + """ + Get information about table partitions. + + Args: + dataset_id: Dataset ID + table_id: Table ID + + Returns: + Dictionary containing partition information + """ + try: + table = self.get_table(dataset_id, table_id) + + if not table.time_partitioning: + return {'has_partitioning': False} + + # Query to get partition information + partition_query = f""" + SELECT + partition_id, + creation_time, + last_modified_time, + total_rows, + total_logical_bytes, + total_physical_bytes + FROM `{self.project_id}.{dataset_id}.{table_id}$__PARTITIONS_SUMMARY__` + ORDER BY partition_id DESC + LIMIT 100 + """ + + try: + partitions = self.execute_query(partition_query) + except Exception as e: + print(e) + # If partition summary table doesn't exist, return basic info + partitions = [] + + partition_info = { + 'has_partitioning': True, + 'partitioning_type': table.time_partitioning.type_, + 'partitioning_field': table.time_partitioning.field, + 'partitioning_expiration_ms': table.time_partitioning.expiration_ms, + 'partitions': partitions, + } + + return partition_info + + except GoogleCloudError as e: + logger.error( + f'Error getting partition info for {dataset_id}.{table_id}: {e}' + ) + raise + except Exception as e: + logger.error( + f'Unexpected error getting partition info for {dataset_id}.{table_id}: {e}' + ) + raise + + async def optimize_table(self, dataset_id: str, table_id: str) -> bool: + """ + Optimize a BigQuery table by running OPTIMIZE command. + + Args: + dataset_id: Dataset ID + table_id: Table ID + + Returns: + True if optimization was successful, False otherwise + """ + try: + optimize_query = f'OPTIMIZE `{self.project_id}.{dataset_id}.{table_id}`' + await self.execute_query(optimize_query) + + logger.info(f'Optimized table {dataset_id}.{table_id}') + return True + + except GoogleCloudError as e: + logger.error(f'Error optimizing table {dataset_id}.{table_id}: {e}') + raise + except Exception as e: + logger.error( + f'Unexpected error optimizing table {dataset_id}.{table_id}: {e}' + ) + raise + + def get_query_history( + self, + max_results: int = 100, + state_filter: Optional[str] = None, + min_creation_time: Optional[datetime] = None, + max_creation_time: Optional[datetime] = None, + ) -> List[Dict[str, Any]]: + """ + Get detailed query history with filtering options. + + Args: + max_results: Maximum number of results to return + state_filter: Filter by job state (RUNNING, DONE, PENDING, etc.) 
+            min_creation_time: Minimum creation time
+            max_creation_time: Maximum creation time
+
+        Returns:
+            List of detailed job information dictionaries
+        """
+        try:
+            jobs = self.list_jobs(
+                max_results=max_results,
+                state_filter=state_filter,
+                min_creation_time=min_creation_time,
+                max_creation_time=max_creation_time,
+            )
+
+            detailed_jobs = []
+            for job_info in jobs:
+                try:
+                    detailed_job = self.get_job_info(job_info['job_id'])
+                    detailed_jobs.append(detailed_job)
+                except Exception as e:
+                    logger.warning(
+                        f"Could not get detailed info for job {job_info['job_id']}: {e}"
+                    )
+                    detailed_jobs.append(job_info)
+
+            return detailed_jobs
+
+        except GoogleCloudError as e:
+            logger.error(f'Error getting query history: {e}')
+            raise
+        except Exception as e:
+            logger.error(f'Unexpected error getting query history: {e}')
+            raise
+
+    def wait_for_job_completion(
+        self, job_id: str, timeout: Optional[int] = None
+    ) -> Dict[str, Any]:
+        """
+        Wait for a BigQuery job to complete and return the result.
+
+        Args:
+            job_id: Job ID to wait for
+            timeout: Timeout in seconds (uses client timeout if None)
+
+        Returns:
+            Dictionary containing job result information
+        """
+        try:
+            job = self.client.get_job(job_id)
+            timeout = timeout or self.timeout
+
+            # Wait for job completion
+            job.result(timeout=timeout)
+
+            result_info = {
+                'job_id': job.job_id,
+                'state': job.state,
+                'created': job.created,
+                'started': job.started,
+                'ended': job.ended,
+                'error_result': job.error_result.to_api_repr()
+                if job.error_result
+                else None,
+            }
+
+            return result_info
+
+        except GoogleCloudError as e:
+            logger.error(f'Error waiting for job {job_id}: {e}')
+            raise
+        except Exception as e:
+            logger.error(f'Unexpected error waiting for job {job_id}: {e}')
+            raise
+
+    def execute_transaction(
+        self, statements: List[str], use_legacy_sql: bool = False, **kwargs
+    ) -> bigquery.job.QueryJob:
+        """
+        Execute multiple SQL statements as a single transaction using BigQuery scripting.
+        All statements are wrapped in a BEGIN TRANSACTION ... COMMIT TRANSACTION
+        block. If any statement fails, the script errors out and BigQuery rolls
+        the open transaction back automatically.
+
+        Args:
+            statements: List of SQL statements to execute transactionally
+            use_legacy_sql: Whether to use legacy SQL (default: False)
+            **kwargs: Additional query configuration parameters
+
+        Returns:
+            QueryJob object for the transaction script
+        """
+        if not statements:
+            raise ValueError('No statements provided for transaction.')
+        # Terminate each statement exactly once, then wrap the batch in an
+        # explicit multi-statement transaction (a bare BEGIN only opens a
+        # scripting block and provides no rollback)
+        body = ';\n'.join(stmt.rstrip().rstrip(';') for stmt in statements)
+        script = f'BEGIN TRANSACTION;\n{body};\nCOMMIT TRANSACTION;'
+        try:
+            job_config = bigquery.QueryJobConfig(
+                use_legacy_sql=use_legacy_sql, **kwargs
+            )
+            query_job = self.client.query(script, job_config=job_config)
+            query_job.result(timeout=self.timeout)
+            logger.info('Transaction committed successfully.')
+            return query_job
+        except GoogleCloudError as e:
+            logger.error(f'BigQuery transaction execution error: {e}')
+            raise
+        except Exception as e:
+            logger.error(f'Unexpected error executing BigQuery transaction: {e}')
+            raise
+
+    def insert_rows_json(
+        self,
+        table_id: str,
+        json_rows: List[Dict[str, Any]],
+        **kwargs,
+    ) -> List[Dict[str, Any]]:
+        """
+        Insert rows into a BigQuery table using JSON format.
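+
+        Note: this wraps the client's streaming-insert API
+        (``Client.insert_rows_json``), so inserted rows land in the streaming
+        buffer first and may not be immediately modifiable by DML or copy jobs.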
+ + Args: + table_id: Full table ID (project.dataset.table) + json_rows: List of dictionaries representing rows to insert + **kwargs: Additional insert configuration parameters + + Returns: + List of errors (empty list if successful) + + Raises: + GoogleCloudError: For BigQuery-specific errors + Exception: For other unexpected errors + """ + try: + result = self.client.insert_rows_json(table_id, json_rows, **kwargs) + + # Check if there are any errors in the result + if result and any(error.get('errors') for error in result): + error_messages = [] + for row_error in result: + if row_error.get('errors'): + for error in row_error['errors']: + error_messages.append( + f"Row {row_error.get('index', 'unknown')}: {error.get('message', 'Unknown error')} " + f"(location: {error.get('location', 'unknown')})" + ) + + error_msg = f"Failed to insert rows into {table_id}. Errors: {'; '.join(error_messages)}" + logger.error(error_msg) + raise GoogleCloudError(error_msg) + + logger.info(f'Successfully inserted {len(json_rows)} rows into {table_id}') + return result + except GoogleCloudError as e: + logger.error(f'BigQuery error inserting rows into {table_id}: {e}') + raise + except Exception as e: + logger.error(f'Unexpected error inserting rows into {table_id}: {e}') + raise + + def __get_join_query( + self, + join_query: str, + table_names: List[str], + table_prefix: str, + projection: str, + where_clause: str, + limit: int, + offset: int, + order_by: Optional[str] = None, + group_by: Optional[str] = None, + ) -> str: + aliases = list(string.ascii_lowercase) + for i, table_name in enumerate(table_names): + join_query = join_query.replace( + f'JOIN {table_name}', + f'JOIN `{table_prefix}{table_name}` AS {aliases[i]}', + ) + join_query = join_query.replace(f'{table_name}.', f'{aliases[i]}.') + where_clause = where_clause.replace(f'{table_name}.', f'{aliases[i]}.') + + # Separate projections for parent table 'a' and each child table + parent_projection = '' + child_projections = {} # Dictionary to store projections for each child table + + for col in projection.split(','): + col = col.strip() + if col.startswith(f'{aliases[0]}.'): # Columns from table 'a' + if parent_projection: + parent_projection += ', ' + parent_projection += col + else: + table_alias = col.split('.')[0] + if table_alias not in child_projections: + child_projections[table_alias] = '' + if child_projections[table_alias]: + child_projections[table_alias] += ', ' + child_projections[table_alias] += col + + agg_query = [] + for idx, (projection) in enumerate(child_projections.values()): + agg_query.append(f""" + ARRAY_AGG( + STRUCT( + {projection} + ) + ) as {table_names[idx+1]} + """) + + order_by_clause = f'ORDER BY {order_by}' if order_by else '' + group_by_clause = f'{group_by},' if group_by else '' + query = f""" + SELECT + {parent_projection if parent_projection else f'ANY_VALUE({aliases[0]}).*'}, + {', '.join(agg_query)} + FROM `{table_prefix}{table_names[0]}` AS {aliases[0]} + {join_query} + WHERE {where_clause} + GROUP BY {group_by_clause} {parent_projection.split(',')[0] if parent_projection else 'a.id'} + {order_by_clause} + LIMIT {limit} OFFSET {offset}; + """ + + return query diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/gcp/gcs.py b/wavefront/server/packages/flo_cloud/flo_cloud/gcp/gcs.py new file mode 100644 index 00000000..217142e2 --- /dev/null +++ b/wavefront/server/packages/flo_cloud/flo_cloud/gcp/gcs.py @@ -0,0 +1,219 @@ +from datetime import datetime, timedelta, UTC +import io +from itertools import islice 
+from google.cloud import storage +from google.cloud.exceptions import NotFound +from typing import Optional, List, Tuple +from .._types import CloudStorageHandler +from ..exceptions import CloudStorageFileNotFoundError +import re +from re import Match + + +class GCSStorage(CloudStorageHandler): + """Google Cloud Storage implementation""" + + def __init__(self, credentials_path: Optional[str] = None): + """ + Initialize GCP client + + Args: + credentials_path: Path to GCP credentials JSON file (optional) + """ + if credentials_path: + self.client = storage.Client.from_service_account_json(credentials_path) + else: + self.client = storage.Client() + + def get_file(self, bucket_name: str, file_path: str) -> bytes: + """ + Get file from GCS bucket and return as buffer + + Args: + bucket_name (str): Name of the GCS bucket + file_path (str): Path to the file in bucket + + Returns: + File content as bytes + + Raises: + CloudStorageFileNotFoundError: If file not found + Exception: If other errors occur + """ + try: + bucket = self.client.bucket(bucket_name) + blob = bucket.blob(file_path) + return blob.download_as_bytes() + except NotFound: + # GCS-specific NotFound exception + raise CloudStorageFileNotFoundError(bucket_name, file_path) + except Exception as e: + raise Exception(f'Error reading file from GCS: {str(e)}') + + def save_large_file( + self, + data: bytes, + bucket_name: str, + key: str, + content_type: Optional[str] = None, + ) -> None: + """GCS implementation of large file upload using upload_from_file with streaming.""" + try: + if not bucket_name: + raise ValueError('bucket_name cannot be None or empty') + if not key: + raise ValueError('key cannot be None or empty') + if data is None: + raise ValueError('data cannot be None') + + fileobj = io.BytesIO(data) + bucket = self.client.bucket(bucket_name) + blob = bucket.blob(key) + + if content_type is not None: + blob.upload_from_file(fileobj, content_type=content_type) + else: + blob.upload_from_file(fileobj) + except Exception as e: + raise Exception(f'Error uploading large file to GCS: {str(e)}') + + def save_small_file( + self, + file_content: bytes, + bucket_name: str, + key: str, + content_type: Optional[str] = None, + ) -> None: + """GCS implementation of small file upload using upload_from_string.""" + try: + if not bucket_name: + raise ValueError('bucket_name cannot be None or empty') + if not key: + raise ValueError('key cannot be None or empty') + if file_content is None: + raise ValueError('file_content cannot be None') + + bucket = self.client.bucket(bucket_name) + blob = bucket.blob(key) + + if content_type is not None: + blob.upload_from_string(file_content, content_type=content_type) + else: + blob.upload_from_string(file_content) + except Exception as e: + raise Exception(f'Error uploading small file to GCS: {str(e)}') + + def get_bucket_key(self, value: str): + match: Optional[Match[str]] = re.match(r'gs://([^/]+)/(.+)', value) + if not match: + raise ValueError('Invalid GCS URL format') + bucket_name = match.group(1) + key = match.group(2) + return bucket_name, key + + def generate_presigned_url( + self, bucket_name: str, key: str, type: str, expiresIn: int = 300 + ) -> str: + """ + Generate a presigned URL for a file in a GCS bucket. 
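+
+        Note: V4 signing requires credentials that carry a private key (for
+        example a service-account JSON file). Illustrative usage, with a
+        hypothetical bucket and key:
+
+            url = gcs.generate_presigned_url('my-bucket', 'docs/report.pdf', 'GET', 600)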
+ + Args: + bucket_name (str): Name of the GCS bucket + key (str): Path to the file in the bucket + type (str): HTTP method for the presigned URL (e.g., 'GET', 'PUT') + expiresIn (int, optional): Expiration time in seconds (default: 300) + + Returns: + str: Presigned URL + + Raises: + Exception: If URL generation fails + """ + try: + if not bucket_name: + raise ValueError('bucket_name cannot be None or empty') + if not key: + raise ValueError('key cannot be None or empty') + if not type: + raise ValueError('type cannot be None or empty') + + bucket = self.client.bucket(bucket_name) + blob = bucket.blob(key) + presigned_url = blob.generate_signed_url( + version='v4', + expiration=datetime.now(UTC) + timedelta(seconds=expiresIn), + method=type, + ) + return presigned_url + except Exception as e: + raise Exception(f'Error generating presigned URL for GCS: {str(e)}') + + def list_files( + self, bucket_name: str, prefix: str, page_size: int = 50, page_number: int = 1 + ) -> Tuple[List[str], bool]: + """ + List files in a GCS bucket with prefix filtering and pagination. + Optimized to use server-side pagination instead of client-side skipping. + + Args: + bucket_name (str): Name of the GCS bucket + prefix (str): Prefix to filter files + page_size (int): Number of files per page (default: 50) + page_number (int): Which page to retrieve, 1-based (default: 1) + + Returns: + Tuple[List[str], bool]: (list of blob names, has_next_page) + + Raises: + Exception: If listing fails + """ + try: + if page_number < 1: + raise ValueError('page_number must be >= 1') + if page_size < 1: + raise ValueError('page_size must be >= 1') + + bucket = self.client.bucket(bucket_name) + + # Create an iterator for all matching blobs. The library handles API calls page by page. + blobs_iterator = bucket.list_blobs(prefix=prefix) + + # Calculate the start and end index for the desired page + start_index = (page_number - 1) * page_size + # We fetch one extra item to check if there's a next page + end_index = start_index + page_size + 1 + + # Use islice to efficiently get only the items for our page. + # It advances the iterator internally without pulling all data. 
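+            # Note: islice still advances the iterator through every earlier
+            # blob, so very deep pages get progressively more expensive.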
+            page_slice = islice(blobs_iterator, start_index, end_index)
+
+            # Convert the iterator slice to a list
+            file_names = [blob.name for blob in page_slice]
+
+            # Determine if there's a next page
+            has_next_page = len(file_names) > page_size
+
+            # Return only the requested page size
+            if has_next_page:
+                return file_names[:page_size], True
+            else:
+                return file_names, False
+
+        except NotFound:
+            raise Exception(f'Bucket {bucket_name} not found')
+        except Exception as e:
+            raise Exception(f'Error listing files from GCS: {str(e)}')
+
+    def delete_file(self, bucket_name: str, file_path: str) -> None:
+        """
+        Delete file from GCS bucket
+        Args:
+            bucket_name: Name of the GCS bucket
+            file_path: Path to the file in bucket
+        """
+        try:
+            bucket = self.client.bucket(bucket_name)
+            blob = bucket.blob(file_path)
+            blob.delete()
+        except Exception as e:
+            raise Exception(f'Error deleting file from GCS: {str(e)}')
diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/gcp/kms.py b/wavefront/server/packages/flo_cloud/flo_cloud/gcp/kms.py
new file mode 100644
index 00000000..c51dc731
--- /dev/null
+++ b/wavefront/server/packages/flo_cloud/flo_cloud/gcp/kms.py
@@ -0,0 +1,100 @@
+import os
+from .._types import FloKMS
+from google.cloud import kms
+from google.cloud import kms_v1
+
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import padding
+from cryptography.hazmat.primitives.asymmetric import utils
+
+gcp_project_id = os.getenv('GCP_PROJECT_ID')
+gcp_location = os.getenv('GCP_LOCATION')
+gcp_key_ring = os.getenv('GCP_KMS_KEY_RING')
+gcp_crypto_key = os.getenv('GCP_KMS_CRYPTO_KEY')
+gcp_crypto_key_version = os.getenv('GCP_KMS_CRYPTO_KEY_VERSION')
+
+
+class GcpKMS(FloKMS):
+    def __init__(self):
+        if not all(
+            [
+                gcp_project_id,
+                gcp_location,
+                gcp_key_ring,
+                gcp_crypto_key,
+                gcp_crypto_key_version,
+            ]
+        ):
+            raise ValueError(
+                'GCP_PROJECT_ID, GCP_LOCATION, GCP_KMS_KEY_RING, GCP_KMS_CRYPTO_KEY '
+                'and GCP_KMS_CRYPTO_KEY_VERSION must be set'
+            )
+
+        self.kms_client = kms.KeyManagementServiceClient()
+        self.key_name = self.kms_client.crypto_key_version_path(
+            project=gcp_project_id,
+            location=gcp_location,
+            key_ring=gcp_key_ring,
+            crypto_key=gcp_crypto_key,
+            crypto_key_version=gcp_crypto_key_version,
+        )
+
+    def encrypt(self, plaintext: str) -> bytes:
+        request = kms_v1.EncryptRequest(
+            name=self.key_name,
+            # The KMS API expects bytes for this field
+            plaintext=plaintext.encode('utf-8'),
+        )
+        response = self.kms_client.encrypt(request=request)
+        return response.ciphertext
+
+    def decrypt(self, ciphertext: bytes) -> bytes:
+        request = kms_v1.DecryptRequest(
+            name=self.key_name,
+            ciphertext=ciphertext,
+        )
+        response = self.kms_client.decrypt(request=request)
+        return response.plaintext
+
+    def sign(self, message: bytes, **kwargs) -> bytes:
+        # `message` must already be a SHA-256 digest: it is sent as the
+        # request's Digest.sha256 field, and verify() below treats it the
+        # same way via Prehashed(SHA256)
+        request = kms_v1.AsymmetricSignRequest(
+            name=self.key_name,
+            digest=kms_v1.Digest(
+                sha256=message,
+            ),
+        )
+
+        response = self.kms_client.asymmetric_sign(request=request)
+        return response.signature
+
+    def verify(self, message: bytes, signature: bytes, **kwargs) -> bool:
+        public_key_pem: bytes | str = self.get_public_key_pem(encode=True)
+        if isinstance(public_key_pem, str):
+            raise ValueError('Public key is not a bytes object')
+        rsa_key = serialization.load_pem_public_key(public_key_pem, default_backend())
+
+        try:
+            rsa_key.verify(  # type: ignore
+                signature=signature,
+                data=message,
+                padding=padding.PSS(  # type: 
ignore + mgf=padding.MGF1(hashes.SHA256()), + salt_length=padding.PSS.MAX_LENGTH, + ), + algorithm=utils.Prehashed(hashes.SHA256()), # type: ignore + ) + return True + except InvalidSignature: + return False + + def get_public_key_pem(self, **kwargs) -> bytes | str: + encode = kwargs.get('encode', False) + + request = kms_v1.GetPublicKeyRequest( + name=self.key_name, + ) + public_key = self.kms_client.get_public_key(request=request) + if encode: + return public_key.pem.encode('utf-8') + return public_key.pem diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/gcp/pubsub.py b/wavefront/server/packages/flo_cloud/flo_cloud/gcp/pubsub.py new file mode 100644 index 00000000..a94c8169 --- /dev/null +++ b/wavefront/server/packages/flo_cloud/flo_cloud/gcp/pubsub.py @@ -0,0 +1,83 @@ +import os +import json +from typing import List +from google.cloud import pubsub_v1 +from .._types import MessageQueue, MessageQueueDict + +gcp_project_id = os.getenv('GCP_PROJECT_ID') +gcp_pubsub_subscription_id = os.getenv('GCP_PUBSUB_SUBSCRIPTION_ID') +gcp_pubsub_topic_id = os.getenv('GCP_PUBSUB_TOPIC_ID') + + +class PubSubQueue(MessageQueue): + def __init__(self): + self.project_id = gcp_project_id + self.subscription_path = ( + f'projects/{self.project_id}/subscriptions/{gcp_pubsub_subscription_id}' + ) + self.subscriber = pubsub_v1.SubscriberClient() + self.publisher = pubsub_v1.PublisherClient() + + def delete_message(self, ack_id: str): + try: + self.subscriber.acknowledge( + request={'subscription': self.subscription_path, 'ack_ids': [ack_id]} + ) + except Exception as e: + raise e + + def receive_messages( + self, max_messages=10, wait_time_sec=20 + ) -> List[MessageQueueDict] | None: + try: + response = self.subscriber.pull( + request={ + 'subscription': self.subscription_path, + 'max_messages': max_messages, + }, + timeout=wait_time_sec, + ) + + messages = [] + + for received_msg in response.received_messages: + data_str = received_msg.message.data.decode('utf-8') + body = json.loads(data_str) + messages.append( + MessageQueueDict( + body=body, + ack_id=received_msg.ack_id, + id=received_msg.message.message_id, + ) + ) + + return messages + except Exception as e: + raise e + + def add_message( + self, + message_body: dict, + topic_name_or_queue_url: str | None = None, + **attributes, + ): + try: + if not topic_name_or_queue_url: + topic_name_or_queue_url = gcp_pubsub_topic_id + + topic_path = f'projects/{self.project_id}/topics/{topic_name_or_queue_url}' + + message_data = json.dumps(message_body).encode('utf-8') + + # Publish with optional attributes + future = self.publisher.publish( + topic_path, + message_data, + **attributes, # Can include custom attributes like {"source": "api", "version": "1.0"} + ) + + message_id = future.result() + return message_id + + except Exception as e: + raise e diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/kms.py b/wavefront/server/packages/flo_cloud/flo_cloud/kms.py new file mode 100644 index 00000000..48e73136 --- /dev/null +++ b/wavefront/server/packages/flo_cloud/flo_cloud/kms.py @@ -0,0 +1,36 @@ +from .aws.kms import AwsKMS +from .gcp.kms import GcpKMS +from ._types import CloudProvider, FloKMS + + +class FloKmsService(FloKMS): + def __init__(self, cloud_provider: str): + self.cloud_provider = cloud_provider + self.kms_client = self.__get_kms_client() + + def __get_kms_client(self) -> FloKMS: + if self.cloud_provider == CloudProvider.AWS.value: + return AwsKMS() + elif self.cloud_provider == CloudProvider.GCP.value: + return GcpKMS() + else: + raise 
ValueError(f'Unsupported cloud provider: {self.cloud_provider}') + + def encrypt(self, plaintext: str) -> bytes: + return self.kms_client.encrypt(plaintext) + + def decrypt(self, ciphertext: str) -> bytes: + return self.kms_client.decrypt(ciphertext) + + def sign(self, message: bytes, **kwargs) -> bytes: + if isinstance(message, str): + message = message.encode('utf-8') + return self.kms_client.sign(message, **kwargs) + + def verify(self, message: bytes, signature: bytes, **kwargs) -> bool: + if isinstance(message, str): + message = message.encode('utf-8') + return self.kms_client.verify(message, signature, **kwargs) + + def get_public_key_pem(self, **kwargs) -> bytes | str: + return self.kms_client.get_public_key_pem(**kwargs) diff --git a/wavefront/server/packages/flo_cloud/flo_cloud/message_queue.py b/wavefront/server/packages/flo_cloud/flo_cloud/message_queue.py new file mode 100644 index 00000000..a66917a0 --- /dev/null +++ b/wavefront/server/packages/flo_cloud/flo_cloud/message_queue.py @@ -0,0 +1,30 @@ +from ._types import CloudProvider, MessageQueue +from .aws.sqs import SQSQueue +from .gcp.pubsub import PubSubQueue + + +class MessageQueueManager(MessageQueue): + def __init__(self, cloud_provider: str): + self.cloud_provider = cloud_provider + self.message_queue_client = self.__get_message_queue_client() + + def __get_message_queue_client(self) -> MessageQueue: + if self.cloud_provider == CloudProvider.AWS.value: + return SQSQueue() + elif self.cloud_provider == CloudProvider.GCP.value: + return PubSubQueue() + else: + raise ValueError(f'Unsupported cloud provider: {self.cloud_provider}') + + def receive_messages(self, max_messages=10, wait_time_sec=20): + return self.message_queue_client.receive_messages(max_messages, wait_time_sec) + + def delete_message(self, ack_id: str): + return self.message_queue_client.delete_message(ack_id) + + def add_message( + self, message_body: dict, topic_name_or_queue_url: str | None = None + ) -> str: + return self.message_queue_client.add_message( + message_body, topic_name_or_queue_url + ) diff --git a/wavefront/server/packages/flo_cloud/pyproject.toml b/wavefront/server/packages/flo_cloud/pyproject.toml new file mode 100644 index 00000000..5369e0ad --- /dev/null +++ b/wavefront/server/packages/flo_cloud/pyproject.toml @@ -0,0 +1,32 @@ +[project] +name = "flo-cloud" +version = "0.1.0" +description = "Cloud service helper functions for flo apps" +authors = [ + { name = "rootflo engineering", email = "engineering@rootflo.ai" } +] +readme = "README.md" +requires-python = ">=3.11" +dependencies = [ + "boto3<=1.38.40", + "cryptography>=45.0.4", + "google-cloud-bigquery==3.34.0", + "google-cloud-kms>=3.5.1", + "google-cloud-storage<3.0.0", + "google-cloud-pubsub>=2.28.0", + "redshift-connector>=2.1.7", +] + +[tool.pytest.ini_options] +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" + +[tool.uv] +package = true + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["flo_cloud"] diff --git a/wavefront/server/packages/flo_utils/README.md b/wavefront/server/packages/flo_utils/README.md new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/packages/flo_utils/flo_utils/__init__.py b/wavefront/server/packages/flo_utils/flo_utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/packages/flo_utils/flo_utils/constants/workflow.py b/wavefront/server/packages/flo_utils/flo_utils/constants/workflow.py new file 
mode 100644 index 00000000..c38ddbf2 --- /dev/null +++ b/wavefront/server/packages/flo_utils/flo_utils/constants/workflow.py @@ -0,0 +1,5 @@ +class WorkflowStatus: + INITIATED = 'initiated' + IN_PROGRESS = 'in_progress' + COMPLETED = 'completed' + FAILED = 'failed' diff --git a/wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_base_db.py b/wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_base_db.py new file mode 100644 index 00000000..23816276 --- /dev/null +++ b/wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_base_db.py @@ -0,0 +1,57 @@ +import yaml +import os +from datetime import datetime + + +class LegacyBaseDatabase: + """Base class for database operations""" + + def __init__(self, cloud_provider: str, schema_file: str): + self.cloud_provider = cloud_provider + self.super_fields = [] + self._load_schema(schema_file) + self._initialize_super_fields() + + def _load_schema(self, schema_file: str): + """Load the schema from unified schema file""" + yaml_path = schema_file + if os.path.exists(yaml_path): + with open(yaml_path) as f: + self.schema = yaml.safe_load(f) + else: + raise FileNotFoundError(f'Schema file not found at {yaml_path}') + + def _initialize_super_fields(self): + """Initialize super fields from schema""" + if not self.schema or 'tables' not in self.schema: + return + + try: + table = self.schema['tables'][0] + for field_name, field_info in table['fields'].items(): + if field_info['type'] == 'SUPER': + self.super_fields.append(field_name) + except (IndexError, KeyError): + pass + + def _custom_serializer(self, obj): + """Helper method for JSON serialization""" + if isinstance(obj, datetime): + return obj.isoformat() + if hasattr(obj, 'to_dict'): + return obj.to_dict() + return str(obj) + + def fix_metadata_keys(self, metadata): + """Clean up metadata keys""" + if metadata is None: + return None + if not isinstance(metadata, dict): + raise ValueError('Input must be a dictionary') + return { + str(key).replace(' ', '_').lower(): value for key, value in metadata.items() + } + + def create_tables(self): + """Create tables in the database""" + pass diff --git a/wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_bigquery.py b/wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_bigquery.py new file mode 100644 index 00000000..1d889bdd --- /dev/null +++ b/wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_bigquery.py @@ -0,0 +1,216 @@ +import time +import datetime +from typing import List, Any +from functools import wraps + +from google.cloud import bigquery +from common_module.log.logger import logger +from flo_utils.legacy_db_repository.legacy_base_db import LegacyBaseDatabase + + +def retry_on_connection_error(max_retries=3, initial_delay=1.0): + def decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): + delay = initial_delay + last_exception = None + + for attempt in range(max_retries): + try: + return func(*args, **kwargs) + except Exception as e: + last_exception = e + if attempt < max_retries - 1: # Don't sleep on last attempt + time.sleep(delay) + delay *= 1.5 # Exponential backoff + raise last_exception + + return wrapper + + return decorator + + +class LegacyBigQueryDatastore(LegacyBaseDatabase): + _instance = None + + def __new__(cls, *args, **kwargs): + if not cls._instance: + cls._instance = super(LegacyBigQueryDatastore, cls).__new__(cls) + return cls._instance + + def __init__(self, project_id: str = None, 
dataset_id: str = None): + self.project_id = project_id + self.dataset_id = dataset_id + if not hasattr(self, 'initialized'): + super().__init__(cloud_provider='gcp', schema_file='resources/schema.yaml') + self._create_client() + self.initialized = True + + def _create_client(self): + """Create BigQuery client.""" + self.client = bigquery.Client(project=self.project_id) + + @retry_on_connection_error() + def execute_query(self, query: str, parameters: dict = None) -> Any: + """Execute a BigQuery query with optional parameters""" + try: + logger.debug(f'Executing query: {query}') + logger.debug(f'Parameters: {parameters}') + + job_config = bigquery.QueryJobConfig() + + if parameters: + query_params = [] + for key, value in parameters.items(): + if isinstance(value, str): + query_params.append( + bigquery.ScalarQueryParameter(key, 'STRING', value) + ) + elif isinstance(value, int): + query_params.append( + bigquery.ScalarQueryParameter(key, 'INT64', value) + ) + elif isinstance(value, float): + query_params.append( + bigquery.ScalarQueryParameter(key, 'FLOAT64', value) + ) + elif isinstance(value, bool): + query_params.append( + bigquery.ScalarQueryParameter(key, 'BOOL', value) + ) + elif isinstance(value, datetime.datetime): + query_params.append( + bigquery.ScalarQueryParameter(key, 'TIMESTAMP', value) + ) + else: + query_params.append( + bigquery.ScalarQueryParameter(key, 'STRING', str(value)) + ) + + job_config.query_parameters = query_params + + query_job = self.client.query(query, job_config=job_config) + result = query_job.result() + + if query.strip().upper().startswith('INSERT'): + logger.info( + f'Insert completed. Affected rows: {query_job.num_dml_affected_rows}' + ) + + return result + + except Exception as e: + logger.error( + f'Query execution failed: {str(e)}\n' + f'Query: {query}\n' + f'Parameters: {parameters}' + ) + raise + + @retry_on_connection_error() + def create_tables(self): + """Create tables using DDL queries from schema manager.""" + table_ddls = self.fetch_ddl_query() + results = [] + for ddl in table_ddls: + logger.info(f'Creating table with DDL: {ddl}') + result = self.execute_query(ddl) + results.append(result) + return results + + @retry_on_connection_error() + def bulk_insert(self, table_name, records): + """Improved bulk insert with chunking and progress tracking.""" + if not records: + logger.warning('No records provided for bulk insert') + return + + chunk_size = 1000 # Adjust based on your needs + total_records = len(records) + table_id = f'{self.project_id}.{self.dataset_id}.{table_name}' + + logger.info( + f'Starting bulk insert of {total_records} records into {table_name}' + ) + + for i in range(0, total_records, chunk_size): + chunk = records[i : i + chunk_size] + + try: + errors = self.client.insert_rows_json(table_id, chunk) + + if errors: + logger.error(f'Errors during bulk insert: {errors}') + raise Exception(f'Failed to insert chunk: {errors}') + + logger.info( + f'Inserted chunk {i//chunk_size + 1} of ' + f'{(total_records + chunk_size - 1)//chunk_size}: ' + f'{len(chunk)} records' + ) + except Exception as e: + logger.error(f'Failed to insert chunk {i//chunk_size + 1}: {str(e)}') + raise + + def fetch_ddl_query(self) -> List[str]: + """Generate DDL queries for table creation""" + queries = [] + + for table in self.schema['tables']: + field_definitions = [] + for field_name, field_info in table['fields'].items(): + nullable = '' if field_info['nullable'] else 'NOT NULL' + bq_type = self._convert_to_bigquery_type(field_info['type']) + 
field_definitions.append(f'{field_name} {bq_type} {nullable}') + + field_definitions.append('created_at TIMESTAMP NOT NULL') + fields_sql = ',\n '.join(field_definitions) + + full_table_name = f'{self.dataset_id}.{table["name"]}' + query = f""" + CREATE TABLE IF NOT EXISTS {full_table_name} ( + {fields_sql} + ) + """ + queries.append(query) + return queries + + def _convert_to_bigquery_type(self, redshift_type: str) -> str: + """Convert Redshift data types to equivalent BigQuery data types.""" + type_mapping = { + 'INTEGER': 'INT64', + 'INT': 'INT64', + 'SMALLINT': 'INT64', + 'BIGINT': 'INT64', + 'DECIMAL': 'NUMERIC', + 'NUMERIC': 'NUMERIC', + 'REAL': 'FLOAT64', + 'DOUBLE PRECISION': 'FLOAT64', + 'FLOAT': 'FLOAT64', + 'CHAR': 'STRING', + 'CHARACTER': 'STRING', + 'VARCHAR': 'STRING', + 'TEXT': 'STRING', + 'DATE': 'DATE', + 'TIME': 'TIME', + 'TIMETZ': 'TIME', + 'TIMESTAMP': 'TIMESTAMP', + 'TIMESTAMPTZ': 'TIMESTAMP', + 'BOOLEAN': 'BOOL', + 'BOOL': 'BOOL', + 'SUPER': 'JSON', + } + + base_type = redshift_type.split('(')[0].upper() + if base_type in type_mapping: + if '(' in redshift_type and base_type in ['DECIMAL', 'NUMERIC']: + precision_scale = redshift_type[redshift_type.find('(') :] + return f'{type_mapping[base_type]}{precision_scale}' + return type_mapping[base_type] + + return 'STRING' + + @staticmethod + def fetch(project_id: str, dataset_id: str) -> LegacyBaseDatabase: + """Factory method to get singleton instance.""" + return LegacyBigQueryDatastore(project_id=project_id, dataset_id=dataset_id) diff --git a/wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_insights_repository.py b/wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_insights_repository.py new file mode 100644 index 00000000..66a132d8 --- /dev/null +++ b/wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_insights_repository.py @@ -0,0 +1,54 @@ +from common_module.log.logger import logger +from flo_utils.legacy_db_repository.legacy_redshift import LegacyRedshiftDatastore +from flo_utils.legacy_db_repository.legacy_bigquery import LegacyBigQueryDatastore +from typing import List, Dict + + +class LegacyInsightsRepository: + def __init__(self): + pass + + def create_tables(self): + pass + + def store(self, table_name: str, records: List[Dict], **kwargs): + pass + + +class LegacyInsightsRedshiftRepository(LegacyInsightsRepository): + def __init__( + self, + redshift_host: str, + redshift_port: int, + redshift_db: str, + redshift_username: str, + redshift_password: str, + ): + self.redshift = LegacyRedshiftDatastore( + redshift_db=redshift_db, + redshift_host=redshift_host, + redshift_port=redshift_port, + redshift_username=redshift_username, + redshift_password=redshift_password, + ) + + def store(self, table_name: str, records: list[dict], **kwargs): + logger.debug(f'Inserting insights count: {len(records)}') + self.redshift.bulk_insert(table_name, records) + + def create_tables(self): + self.redshift.create_tables() + + +class LegacyInsightsBigQueryRepository(LegacyInsightsRepository): + def __init__(self, project_id, dataset_id): + self.bigquery = LegacyBigQueryDatastore( + project_id=project_id, dataset_id=dataset_id + ) + + def store(self, table_name: str, records: list[dict], **kwargs): + logger.debug(f'Inserting insights count: {len(records)}') + self.bigquery.bulk_insert(table_name, records) + + def create_tables(self): + self.bigquery.create_tables() diff --git a/wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_redshift.py 
b/wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_redshift.py
new file mode 100644
index 00000000..ca8a820b
--- /dev/null
+++ b/wavefront/server/packages/flo_utils/flo_utils/legacy_db_repository/legacy_redshift.py
@@ -0,0 +1,241 @@
+import time
+import redshift_connector
+from functools import wraps
+from contextlib import contextmanager
+from common_module.log.logger import logger
+from flo_utils.legacy_db_repository.legacy_base_db import LegacyBaseDatabase
+from typing import List
+
+
+def retry_on_connection_error(max_retries=3, delay=1, timeout=30):
+    def decorator(func):
+        @wraps(func)
+        def wrapper(self, *args, **kwargs):
+            retries = 0
+            last_exception = None
+
+            while retries < max_retries:
+                try:
+                    kwargs.pop('connection', None)
+                    with self.get_connection(timeout) as conn:
+                        return func(self, *args, **kwargs, connection=conn)
+                except Exception as e:
+                    last_exception = e
+                    retries += 1
+                    logger.warning(
+                        f'Database connection error: {str(e)}. '
+                        f'Attempt {retries} of {max_retries}'
+                    )
+
+                    if retries == max_retries:
+                        logger.error(
+                            f'Max retries reached. Last error: {str(last_exception)}'
+                        )
+                        raise last_exception
+
+                    time.sleep(delay * retries)  # Linear backoff
+            return None
+
+        return wrapper
+
+    return decorator
+
+
+class LegacyRedshiftDatastore(LegacyBaseDatabase):
+    _instance = None
+
+    def __new__(cls, *args, **kwargs):
+        if not cls._instance:
+            cls._instance = super(LegacyRedshiftDatastore, cls).__new__(cls)
+        return cls._instance
+
+    def __init__(
+        self,
+        redshift_host: str,
+        redshift_port: int,
+        redshift_db: str,
+        redshift_username: str,
+        redshift_password: str,
+    ):
+        self.redshift_host = redshift_host
+        self.redshift_port = redshift_port
+        self.redshift_db = redshift_db
+        self.redshift_username = redshift_username
+        self.redshift_password = redshift_password
+        if not hasattr(self, 'initialized'):
+            # The base class requires both arguments; the schema path is
+            # assumed to mirror the one used by the BigQuery datastore
+            super().__init__(cloud_provider='aws', schema_file='resources/schema.yaml')
+            self._setup_connection_params()
+            self.initialized = True
+
+    def _setup_connection_params(self):
+        """Setup connection parameters."""
+        # Check if we're connecting to a local mock server
+        is_local_mock = self.redshift_host in ['localhost', '127.0.0.1']
+
+        self.connection_params = {
+            'host': self.redshift_host,
+            'port': int(self.redshift_port),
+            'database': self.redshift_db,
+            'user': self.redshift_username,
+            'password': self.redshift_password,
+        }
+
+        if is_local_mock:
+            self.connection_params['ssl'] = False
+
+    @contextmanager
+    def get_connection(self, timeout=30):
+        """Context manager for database connections with timeout."""
+        connection = None
+        try:
+            connection = redshift_connector.connect(**self.connection_params)
+            yield connection
+        except Exception as e:
+            logger.error(f'Connection error: {str(e)}')
+            raise
+        finally:
+            if connection:
+                connection.close()
+
+    @retry_on_connection_error()
+    def execute_query(self, query: str, parameters: dict = None, connection=None):
+        """Execute a query with better error handling and logging."""
+        try:
+            logger.debug(f'Executing query: {query}')
+            logger.debug(f'Parameters: {parameters}')
+
+            if connection is None:
+                with self.get_connection() as connection:
+                    cursor = connection.cursor()
+                    if parameters:
+                        # Convert named parameters to positional
+                        formatted_query = query
+                        param_values = []
+                        for key, value in parameters.items():
+                            formatted_query = formatted_query.replace(f':{key}', '%s')
+                            param_values.append(value)
+                        cursor.execute(formatted_query, param_values)
+                    else:
+                        cursor.execute(query)
+
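+                    # SELECT statements return a result set; anything else is
+                    # DML/DDL and must be committed before the connection closes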
+                    if query.strip().upper().startswith('SELECT'):
+                        result = cursor.fetchall()
+                    else:
+                        connection.commit()
+                        result = cursor
+                    cursor.close()
+            else:
+                cursor = connection.cursor()
+                if parameters:
+                    # Convert named parameters to positional
+                    formatted_query = query
+                    param_values = []
+                    for key, value in parameters.items():
+                        formatted_query = formatted_query.replace(f':{key}', '%s')
+                        param_values.append(value)
+                    cursor.execute(formatted_query, param_values)
+                else:
+                    cursor.execute(query)
+
+                if query.strip().upper().startswith('SELECT'):
+                    result = cursor.fetchall()
+                else:
+                    # Commit DML on the caller-supplied connection as well;
+                    # redshift_connector does not autocommit by default
+                    connection.commit()
+                    result = cursor
+                cursor.close()
+
+            if query.strip().upper().startswith('INSERT'):
+                logger.info(f'Insert completed. Rowcount: {result.rowcount}')
+
+            return result
+
+        except Exception as e:
+            logger.error(
+                f'Query execution failed: {str(e)}\n'
+                f'Query: {query}\n'
+                f'Parameters: {parameters}'
+            )
+            raise
+
+    def create_tables(self):
+        """Create tables using DDL queries from schema manager."""
+        table_ddls = self.fetch_ddl_query()
+        results = []
+        for ddl in table_ddls:
+            logger.info(f'Creating table with DDL: {ddl}')
+            result = self.execute_query(ddl)
+            results.append(result)
+        return results
+
+    def _prepare_values_placeholder(self, super_fields: list, column_name: str):
+        """Prepare placeholder for field value in SQL"""
+        if column_name in super_fields:
+            return 'JSON_PARSE(%s)'
+        return '%s'
+
+    def bulk_insert_query(self, full_table_name: str, columns) -> str:
+        """Generate bulk insert query"""
+        BULK_INSERT = f"""
+            INSERT INTO {full_table_name} ({', '.join(columns)})
+            VALUES ({', '.join([self._prepare_values_placeholder(self.super_fields, col) for col in columns])})
+        """
+        return BULK_INSERT
+
+    @retry_on_connection_error()
+    def bulk_insert(self, full_table_name: str, records: list[dict], connection=None):
+        """Improved bulk insert with chunking and progress tracking."""
+        if not records:
+            logger.warning('No records provided for bulk insert')
+            return
+
+        chunk_size = 1000  # Adjust based on your needs
+        total_records = len(records)
+        columns = list(records[0].keys())
+        bulk_insert_q = self.bulk_insert_query(full_table_name, columns)
+
+        logger.info(
+            f'Starting bulk insert of {total_records} records into {full_table_name}'
+        )
+
+        for i in range(0, total_records, chunk_size):
+            chunk = records[i : i + chunk_size]
+            # executemany expects one parameter sequence per row, ordered to
+            # match the column list in the INSERT statement
+            rows = [tuple(record.get(col) for col in columns) for record in chunk]
+
+            try:
+                cursor = connection.cursor()
+                cursor.executemany(bulk_insert_q, rows)
+                connection.commit()
+                cursor.close()
+                logger.info(
+                    f'Inserted chunk {i // chunk_size + 1} of '
+                    f'{(total_records + chunk_size - 1) // chunk_size}: '
+                    f'{len(chunk)} records'
+                )
+            except Exception as e:
+                logger.error(f'Failed to insert chunk {i // chunk_size + 1}: {str(e)}')
+                raise
+
+    def fetch_ddl_query(self) -> List[str]:
+        """Generate DDL queries for table creation"""
+        queries = []
+
+        for table in self.schema['tables']:
+            field_definitions = []
+            for field_name, field_info in table['fields'].items():
+                nullable = 'NULL' if field_info['nullable'] else 'NOT NULL'
+                field_definitions.append(
+                    f"{field_name} {field_info['type']} {nullable}"
+                )
+
+            field_definitions.append('created_at TIMESTAMPTZ NOT NULL')
+            fields_sql = ',\n    '.join(field_definitions)
+
+            query = f"""
+                CREATE TABLE IF NOT EXISTS {table["name"]} (
+                    {fields_sql}
+                )
+                DISTSTYLE AUTO
+                SORTKEY AUTO;
+            """
+            queries.append(query)
+        return queries
+
+    @staticmethod
+    def fetch(
+        redshift_host: str,
+        redshift_port: int,
+        redshift_db: str,
+        redshift_username: str,
+        redshift_password: str,
+    ) -> LegacyBaseDatabase:
+        """Factory method to get singleton instance."""
+        return LegacyRedshiftDatastore(
+            redshift_host=redshift_host,
+            redshift_port=redshift_port,
+            redshift_db=redshift_db,
+            redshift_username=redshift_username,
+            redshift_password=redshift_password,
+        )
diff --git a/wavefront/server/packages/flo_utils/flo_utils/legacy_schema_manager/legacy_schema_manager.py b/wavefront/server/packages/flo_utils/flo_utils/legacy_schema_manager/legacy_schema_manager.py
new file mode 100644
index 00000000..4bbd02fc
--- /dev/null
+++ b/wavefront/server/packages/flo_utils/flo_utils/legacy_schema_manager/legacy_schema_manager.py
@@ -0,0 +1,211 @@
+import json
+import yaml
+import os
+from datetime import datetime
+from typing import List
+
+
+class TableNameConstants:
+    GOLD_LOAN_DATA = 'rf_gold_data_object'
+    GOLD_ITEM_DATA = 'rf_gold_item_details'
+
+
+class LegacySchemaManager:
+    """Base class for all schema managers"""
+
+    def __init__(self, cloud_provider: str, schema_file: str):
+        """Initialize base schema manager"""
+        self.cloud_provider = cloud_provider
+        # Convenience flags used by the DDL generator below
+        self.is_aws = cloud_provider == 'aws'
+        self.is_gcp = cloud_provider == 'gcp'
+        self.super_fields = {}
+        self.schema = self._load_schema(schema_file)
+        self._initialize_super_fields()
+
+    def _load_schema(self, schema_file: str):
+        """Load the schema from unified schema file"""
+        yaml_path = schema_file
+        if os.path.exists(yaml_path):
+            with open(yaml_path) as f:
+                full_schema = yaml.safe_load(f)
+            return full_schema
+        raise ValueError(f'Schema file not found at {yaml_path}')
+
+    def _initialize_super_fields(self):
+        """Initialize super fields from schema, organized by table"""
+        if (
+            not hasattr(self, 'schema')
+            or not self.schema
+            or 'tables' not in self.schema
+        ):
+            return
+
+        # Track super fields per table
+        for table in self.schema['tables']:
+            table_super_fields = []
+            for field_name, field_info in table['fields'].items():
+                field_type = field_info['type']
+                if field_type == 'SUPER':
+                    table_super_fields.append(field_name)
+            self.super_fields[table['name']] = table_super_fields
+
+    @staticmethod
+    def fetch():
+        """Factory method - should be implemented by subclasses"""
+        raise NotImplementedError('Subclasses must implement fetch')
+
+    def fetch_ddl_query(self, table_name: str, dataset_id: str = None) -> List[str]:
+        """Generate DDL queries for table creation"""
+        queries = []
+
+        for table in self.schema['tables']:
+            field_definitions = []
+            for field_name, field_info in table['fields'].items():
+                if self.is_aws:
+                    nullable = 'NULL' if field_info['nullable'] else 'NOT NULL'
+                    field_definitions.append(
+                        f"{field_name} {field_info['type']} {nullable}"
+                    )
+                elif self.is_gcp:
+                    nullable = '' if field_info['nullable'] else 'NOT NULL'
+                    bq_type = self._convert_to_bigquery_type(field_info['type'])
+                    field_definitions.append(f'{field_name} {bq_type} {nullable}')
+
+            timestamp_type = 'TIMESTAMPTZ' if self.is_aws else 'TIMESTAMP'
+            field_definitions = [
+                *field_definitions,
+                f'created_at {timestamp_type} NOT NULL',
+            ]
+
+            fields_sql = ',\n    '.join(field_definitions)
+
+            if self.cloud_provider == 'aws':
+                full_table_name = self.resolve_table_name(table_name, table['name'])
+                query = f"""
+                    CREATE TABLE IF NOT EXISTS {full_table_name} (
+                        {fields_sql}
+                    )
+                    DISTSTYLE AUTO
+                    SORTKEY AUTO;
+                """
+            elif self.cloud_provider == 'gcp':
+                full_table_name = (
+                    f'{dataset_id}.{self.resolve_table_name(table_name, table["name"])}'
+                )
+                query = f"""
+                    CREATE TABLE IF NOT EXISTS {full_table_name} (
+                        {fields_sql}
+                    )
+                """
+            queries.append(query)
+        return queries
+
+    def _custom_serializer(self, obj):
+        """Helper method for JSON serialization"""
+        if isinstance(obj, datetime):
+            return obj.isoformat()
+        if hasattr(obj, 'to_dict'):
+            return obj.to_dict()
+        return str(obj)
+
+    def populate_schema(
+        self, core_table_schema: dict, table_name: str, record: dict, insights: dict
+    ) -> dict:
+        """Populate schema with entries"""
+        table_super_fields = self.super_fields.get(table_name, [])
+        for 
field in core_table_schema['fields']: + value = insights.get(field, record.get(field)) + if value is not None: + if field in table_super_fields: + value = json.dumps(value, default=self._custom_serializer) + record[field] = value + elif field not in record: + record[field] = None + return record + + def _prepare_values_placeholder( + self, super_fields: list, column_name: str, is_gcp: bool = False + ): + """Prepare placeholder for field value in SQL""" + if is_gcp: + if column_name in super_fields: + return f'JSON_EXTRACT_SCALAR(@{column_name})' + return f'@{column_name}' + else: + if column_name in super_fields: + return f'JSON_PARSE(:{column_name})' + return f':{column_name}' + + def resolve_table_name(self, table_name: str, rf_internal_name: str): + """Resolve full table name""" + if table_name == '': + return f'rf_{rf_internal_name}' + return f'rf_{rf_internal_name}_{table_name}' + + def fix_metadata_keys(self, metadata): + """Clean up metadata keys""" + if metadata is None: + return None + if not isinstance(metadata, dict): + raise ValueError('Input must be a dictionary') + + def transform_key(key): + return str(key).replace(' ', '_').lower() + + meta = {transform_key(key): value for key, value in metadata.items()} + return meta + + def _convert_to_bigquery_type(self, redshift_type: str) -> str: + """Convert Redshift data types to equivalent BigQuery data types.""" + type_mapping = { + # Numeric types + 'INTEGER': 'INT64', + 'INT': 'INT64', + 'SMALLINT': 'INT64', + 'BIGINT': 'INT64', + 'DECIMAL': 'NUMERIC', + 'NUMERIC': 'NUMERIC', + 'REAL': 'FLOAT64', + 'DOUBLE PRECISION': 'FLOAT64', + 'FLOAT': 'FLOAT64', + # Character types + 'CHAR': 'STRING', + 'CHARACTER': 'STRING', + 'VARCHAR': 'STRING', + 'CHARACTER VARYING': 'STRING', + 'TEXT': 'STRING', + # Date/Time types + 'DATE': 'DATE', + 'TIME': 'TIME', + 'TIMETZ': 'TIME', + 'TIMESTAMP': 'TIMESTAMP', + 'TIMESTAMPTZ': 'TIMESTAMP', + # Boolean type + 'BOOLEAN': 'BOOL', + 'BOOL': 'BOOL', + # JSON + 'SUPER': 'JSON', + } + + # Handle types with precision/scale like DECIMAL(10,2) + base_type = redshift_type.split('(')[0].upper() + if base_type in type_mapping: + if '(' in redshift_type and base_type in ['DECIMAL', 'NUMERIC']: + # Keep the precision/scale for numeric types + precision_scale = redshift_type[redshift_type.find('(') :] + return f'{type_mapping[base_type]}{precision_scale}' + return type_mapping[base_type] + + # Default to STRING for unsupported types + return 'STRING' + + def fetch_gold_schema(self): + return list( + filter( + lambda x: x['name'] == TableNameConstants.GOLD_LOAN_DATA, + self.schema['tables'], + ) + )[0], list( + filter( + lambda x: x['name'] == TableNameConstants.GOLD_ITEM_DATA, + self.schema['tables'], + ) + )[0] diff --git a/wavefront/server/packages/flo_utils/flo_utils/llm/__init__.py b/wavefront/server/packages/flo_utils/flo_utils/llm/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/packages/flo_utils/flo_utils/main.py b/wavefront/server/packages/flo_utils/flo_utils/main.py new file mode 100644 index 00000000..60f02761 --- /dev/null +++ b/wavefront/server/packages/flo_utils/flo_utils/main.py @@ -0,0 +1,6 @@ +def main(): + print('Hello from utils!') + + +if __name__ == '__main__': + main() diff --git a/wavefront/server/packages/flo_utils/flo_utils/streaming/event_message.py b/wavefront/server/packages/flo_utils/flo_utils/streaming/event_message.py new file mode 100644 index 00000000..806d2069 --- /dev/null +++ 
b/wavefront/server/packages/flo_utils/flo_utils/streaming/event_message.py
@@ -0,0 +1,8 @@
+from dataclasses import dataclass
+
+
+@dataclass
+class BaseEventMessage:
+    id: str
+    ack_id: str
+    body: dict
diff --git a/wavefront/server/packages/flo_utils/flo_utils/streaming/message_processor.py b/wavefront/server/packages/flo_utils/flo_utils/streaming/message_processor.py
new file mode 100644
index 00000000..cda9aada
--- /dev/null
+++ b/wavefront/server/packages/flo_utils/flo_utils/streaming/message_processor.py
@@ -0,0 +1,27 @@
+from abc import ABC, abstractmethod
+from dataclasses import dataclass
+from typing import Generic, List, Optional, TypeVar
+from flo_utils.streaming.event_message import BaseEventMessage
+
+T = TypeVar('T')  # Type for insight
+
+
+@dataclass
+class ProcessingResult(Generic[T]):
+    success: bool
+    insights: Optional[T] = None
+    error: Optional[str] = None
+
+
+class MessageProcessor(ABC, Generic[T]):
+    """Base class for all message processors"""
+
+    @abstractmethod
+    async def process(self, message: BaseEventMessage) -> ProcessingResult:
+        pass
+
+    @abstractmethod
+    def store(self, insights: List[T], is_failed: bool = False) -> bool:
+        """Store insights using appropriate repositories"""
+        pass
diff --git a/wavefront/server/packages/flo_utils/flo_utils/streaming/stream_listner.py b/wavefront/server/packages/flo_utils/flo_utils/streaming/stream_listner.py
new file mode 100644
index 00000000..767636b8
--- /dev/null
+++ b/wavefront/server/packages/flo_utils/flo_utils/streaming/stream_listner.py
@@ -0,0 +1,158 @@
+import asyncio
+import concurrent.futures
+from typing import List, Optional
+from flo_cloud._types import MessageQueue, MessageQueueDict
+from common_module.log.logger import logger
+from db_repo_module.cache.cache_manager import CacheManager
+
+from abc import ABC, abstractmethod
+from flo_utils.streaming.event_message import BaseEventMessage
+from flo_utils.streaming.message_processor import MessageProcessor, ProcessingResult
+
+
+class StreamListener(ABC):
+    def __init__(
+        self,
+        event_manager: MessageQueue,
+        processor: MessageProcessor,
+        cache_manager: CacheManager,
+        retry_count: int,
+        streaming_batch_size: int = 5,
+    ):
+        self.event_manager = event_manager
+        self.processor = processor
+        self.cache_manager = cache_manager
+        self.retry_count = retry_count
+        self.streaming_batch_size = streaming_batch_size
+
+    def handle_error(
+        self,
+        message_id: str,
+        message_receipt_id: str,
+        processor: Optional[MessageProcessor] = None,
+        insights_to_commit: Optional[List] = None,
+    ):
+        insights_to_commit = insights_to_commit or []
+        try:
+            error_key = f'error_{message_id}'
+            current_retry_count = self.cache_manager.get_int(error_key, 0)
+            if current_retry_count >= self.retry_count:
+                logger.error(
+                    f'Max retries exceeded for {message_id}. Removing from queue.'
+                )
+                self.delete_message(message_receipt_id)
+                if processor and len(insights_to_commit) > 0:
+                    logger.error(
+                        f'Storing failed insights for {message_id} after max retries.'
+                    )
+                    processor.store(insights_to_commit, is_failed=True)
+                self.cache_manager.remove(error_key)
+            else:
+                self.cache_manager.add(error_key, current_retry_count + 1, expiry=3600)
+                logger.warning(
+                    f'Retrying {message_id}. '
+                    f'Attempt {current_retry_count + 1} of {self.retry_count}'
+                )
+        except Exception as e:
+            logger.error(f'Error in error handling: {e}')
+
+    def delete_message(self, message_id: str):
+        try:
+            self.event_manager.delete_message(message_id)
+        except Exception as e:
+            logger.error(f'Failed to delete message: {e}')
+
+    @abstractmethod
+    def get_event_messages(
+        self, messages: List[MessageQueueDict]
+    ) -> List[BaseEventMessage]:
+        pass
+
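+    # Each worker runs the poll loop below forever. Message ids already seen
+    # in the cache are skipped, which de-duplicates deliveries across workers
+    # (at-least-once queues can deliver the same message more than once).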
+    async def receive_queue_messages(self, worker_id: str):
+        while True:
+            try:
+                response = self.event_manager.receive_messages(
+                    max_messages=self.streaming_batch_size
+                )
+                messages: List[BaseEventMessage] = self.get_event_messages(response)
+                logger.info(f'{worker_id}: listening for messages...')
+                if not messages:
+                    await asyncio.sleep(5)
+                    continue
+
+                message_ids_to_delete = []
+                insights_to_commit: List[ProcessingResult] = []
+
+                for message in messages:
+                    message_receipt_id = message.ack_id
+                    message_id_str = message.id
+
+                    if self.cache_manager.get_str(str(message_id_str)):
+                        continue
+
+                    self.cache_manager.add(str(message_id_str), '1')
+                    try:
+                        result: ProcessingResult = await asyncio.wait_for(
+                            self.processor.process(message), timeout=60 * 5
+                        )
+
+                        if result.success:
+                            insights_to_commit.append(result)
+                            message_ids_to_delete.append(message_receipt_id)
+                        else:
+                            self.handle_error(message_id_str, message_receipt_id)
+
+                    except asyncio.TimeoutError:
+                        logger.error(
+                            f'Task timed out after 5 minutes for message id: {message_id_str}'
+                        )
+                        self.handle_error(
+                            message_id_str,
+                            message_receipt_id,
+                            self.processor,
+                            insights_to_commit,
+                        )
+                if insights_to_commit and self.processor:
+                    is_successful = self.processor.store(insights_to_commit)
+                    if is_successful:
+                        logger.info(
+                            f'Successfully stored insights for {len(insights_to_commit)} items'
+                        )
+                        for receipt_id in message_ids_to_delete:
+                            self.delete_message(receipt_id)
+                    else:
+                        # Leave the messages on the queue so they are redelivered
+                        # and retried, rather than acking work that was not stored
+                        logger.error(
+                            f'Failed to store insights for {len(insights_to_commit)} items'
+                        )
+            except Exception as e:
+                logger.error(
+                    f'Unexpected error in message processing: {e}', exc_info=True
+                )
+                await asyncio.sleep(10)
+
+    def run_workers(self, thread_count: int):
+        with concurrent.futures.ThreadPoolExecutor(
+            max_workers=thread_count
+        ) as executor:
+            futures = [
+                executor.submit(self._run_worker, f'Worker {i + 1}')
+                for i in range(thread_count)
+            ]
+            concurrent.futures.wait(futures)
+
+        logger.warning('All workers have stopped')
+
+    def _run_worker(self, worker_id: str):
+        loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(loop)
+        try:
+            loop.run_until_complete(self.receive_queue_messages(worker_id))
+        except Exception as e:
+            logger.error(f'Worker {worker_id} crashed: {e}')
+        finally:
+            loop.close()
diff --git a/wavefront/server/packages/flo_utils/flo_utils/utils/helper.py b/wavefront/server/packages/flo_utils/flo_utils/utils/helper.py
new file mode 100644
index 00000000..4df2d204
--- /dev/null
+++ b/wavefront/server/packages/flo_utils/flo_utils/utils/helper.py
@@ -0,0 +1,11 @@
+import tiktoken
+from flo_utils.utils.log import logger
+
+
+def truncate_to_n_tokens(text: str, n: int, model: str = 'gpt-4'):
+    """Truncate a string to at most `n` tokens using OpenAI's tokenizer."""
+    enc = tiktoken.encoding_for_model(model)
+    input_tkns = enc.encode(text)
+    logger.info(f'Total input tokens: {len(input_tkns)}, truncating to {n}')
+    tokens = input_tkns[:n]
+    return enc.decode(tokens)
diff --git 
a/wavefront/server/packages/flo_utils/flo_utils/utils/log.py b/wavefront/server/packages/flo_utils/flo_utils/utils/log.py new file mode 100644 index 00000000..77e24395 --- /dev/null +++ b/wavefront/server/packages/flo_utils/flo_utils/utils/log.py @@ -0,0 +1,25 @@ +import os +import logging + +log_level = os.environ.get('LOG_LEVEL', 'INFO') +log_format = ( + '%(asctime)s | %(levelname)-8s | %(name)s | %(filename)s:%(lineno)d | %(message)s' +) +logging.basicConfig( + level=log_level, + format=log_format, + datefmt='%Y-%m-%d %H:%M:%S', +) + + +class CustomLogger(logging.Logger): + def error(self, msg, *args, **kwargs): + """Override the error method to always include exc_info=True.""" + if 'exc_info' not in kwargs: + kwargs['exc_info'] = True + super().error(msg, *args, **kwargs) + + +# Set the custom logger class +logging.setLoggerClass(CustomLogger) +logger = logging.getLogger('Auraflo') diff --git a/wavefront/server/packages/flo_utils/pyproject.toml b/wavefront/server/packages/flo_utils/pyproject.toml new file mode 100644 index 00000000..867546e9 --- /dev/null +++ b/wavefront/server/packages/flo_utils/pyproject.toml @@ -0,0 +1,35 @@ +[project] +name = "flo-utils" +version = "0.1.0" +description = "Add your description here" +authors = [ + { name = "rootflo engineering", email = "engineering@rootflo.ai" } +] +readme = "README.md" +requires-python = ">=3.11" +dependencies = [ + "boto3<=1.38.40", + "google-cloud-kms>=3.5.1", + "tenacity>=8.4.1", + "common-module", + "db-repo-module", + "pyyaml>=6.0.3,<7" +] + +[tool.pytest.ini_options] +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" + +[tool.uv] +package = true + +[tool.uv.sources] +common-module = { workspace = true } +db-repo-module = { workspace = true } + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["flo_utils"] diff --git a/wavefront/server/plugins/authenticator/README.md b/wavefront/server/plugins/authenticator/README.md new file mode 100644 index 00000000..92a2d26f --- /dev/null +++ b/wavefront/server/plugins/authenticator/README.md @@ -0,0 +1,222 @@ +# Authenticator Plugin + +A unified authentication plugin system that supports multiple authentication methods including email/password, Google OAuth, and Microsoft OAuth. + +## Features + +- **Unified Authentication Interface**: Single API for all authentication methods +- **Multiple Authenticator Support**: Email/password, Google OAuth, Microsoft OAuth +- **Admin Controls**: Enable/disable authentication methods via admin API +- **Session Management**: Integrated with existing session system +- **Singleton Pattern**: Each authenticator type uses singleton pattern for efficiency +- **Configuration Validation**: Built-in validation for all authenticator configurations +- **Health Monitoring**: Health check endpoints for each authenticator + +## Architecture + +### Core Components + +1. **AuthenticatorABC**: Abstract base class defining the interface for all authenticators +2. **AuthenticatorFactory**: Thread-safe singleton factory that manages authenticator instances with caching and lifecycle management +3. **SessionManager**: Handles session creation and management for all auth types +4. **Individual Authenticators**: Specific implementations for each auth method +5. 
**Controller Separation**: + - `authenticator_controller`: Manages individual authenticator instances + - `allowed_authenticator_controller`: Manages system-wide authenticator type settings + +### Supported Authenticator Types + +- `email_password`: Traditional email and password authentication +- `google_oauth`: Google OAuth 2.0 authentication +- `microsoft_oauth`: Microsoft OAuth 2.0 authentication +- `saml`: SAML single sign-on (planned) +- `ldap`: LDAP directory authentication (planned) + +## Configuration + +### Email/Password Configuration + +```json +{ + "password_policy": { + "min_length": 8, + "require_uppercase": true, + "require_lowercase": true, + "require_numbers": true, + "require_special_chars": false, + "max_attempts": 5, + "lockout_duration": 900 + }, + "two_factor_enabled": false, + "password_reset_enabled": true, + "session_timeout": 3600, + "rate_limit_enabled": true +} +``` + +### Google OAuth Configuration + +```json +{ + "client_id": "your_google_client_id", + "client_secret": "your_google_client_secret", + "redirect_uri": "https://your-domain.com/auth/google/callback", + "scopes": ["openid", "email", "profile"], + "hosted_domain": null, + "access_type": "offline", + "prompt": "consent" +} +``` + +### Microsoft OAuth Configuration + +```json +{ + "client_id": "your_microsoft_client_id", + "client_secret": "your_microsoft_client_secret", + "tenant_id": "your_tenant_id", + "redirect_uri": "https://your-domain.com/auth/microsoft/callback", + "scopes": ["openid", "email", "profile"], + "authority": "https://login.microsoftonline.com/", + "response_type": "code", + "response_mode": "query" +} +``` + +## API Endpoints + +### Authentication Endpoints + +- `POST /v1/auth/authenticate` - Unified authentication endpoint +- `POST /v1/auth/oauth/init` - Initialize OAuth flow +- `GET /v1/auth/oauth/callback/google` - Google OAuth callback +- `GET /v1/auth/oauth/callback/microsoft` - Microsoft OAuth callback + +### Admin Endpoints + +#### Authenticator Instance Management +- `POST /v1/authenticators` - Create authenticator configuration +- `GET /v1/authenticators/{auth_name}` - Get authenticator configuration +- `PUT /v1/authenticators/{auth_name}` - Update authenticator configuration +- `DELETE /v1/authenticators/{auth_name}` - Delete authenticator configuration +- `GET /v1/authenticators/{auth_name}/health` - Check authenticator health + +#### Allowed Authenticator Type Management +- `GET /v1/allowed-authenticators/types` - Get enabled authenticator types +- `POST /v1/allowed-authenticators/types/{auth_type}/enable` - Enable authenticator type +- `POST /v1/allowed-authenticators/types/{auth_type}/disable` - Disable authenticator type + +## Usage Examples + +### Email/Password Authentication + +```json +POST /v1/auth/authenticate +{ + "auth_type": "email_password", + "credentials": { + "email": "user@example.com", + "password": "securepassword" + } +} +``` + +### Google OAuth Flow + +1. Initialize OAuth flow: +```json +POST /v1/auth/oauth/init +{ + "auth_type": "google_oauth" +} +``` + +2. Redirect user to returned `authorization_url` + +3. Handle callback automatically at `/v1/auth/oauth/callback/google` + +### Microsoft OAuth Flow + +1. Initialize OAuth flow: +```json +POST /v1/auth/oauth/init +{ + "auth_type": "microsoft_oauth" +} +``` + +2. Redirect user to returned `authorization_url` + +3. 
Handle callback automatically at `/v1/auth/oauth/callback/microsoft` + +## Database Setup + +Run the setup script to initialize the database: + +```sql +-- Run setup_authenticator_data.sql to populate initial data +``` + +This will create: +- Allowed authenticator types in `allowed_authenticator` table +- Default configurations in `authenticator` table + +## Security Features + +- **Rate Limiting**: Built-in rate limiting for email/password authentication +- **Password Policies**: Configurable password complexity requirements +- **Session Management**: Secure session creation and validation +- **Token Encryption**: OAuth tokens and secrets are stored securely +- **Domain Restrictions**: Google OAuth supports hosted domain restrictions + +## Development + +### Adding New Authenticators + +1. Create new authenticator class extending `AuthenticatorABC` +2. Implement all required methods +3. Add configuration class +4. Update `AuthenticatorPlugin` factory to include new type +5. Add database entry to `allowed_authenticator` table + +### Testing + +Test endpoints using the health check APIs: + +```bash +# Test authenticator health +GET /v1/authenticators/{auth_name}/health + +# Test specific auth flow +POST /v1/auth/authenticate +``` + +## Dependencies + +- `requests`: For OAuth API calls to Google and Microsoft endpoints + +## Recent Improvements + +### v0.1.0 Updates + +- **Factory Deadlock Fix**: Resolved a critical deadlock issue in `AuthenticatorFactory.update_authenticator()` that was causing API freezes during authenticator updates +- **Controller Separation**: Split authenticator management into two separate controllers for better organization: + - `authenticator_controller`: Manages individual authenticator instances + - `allowed_authenticator_controller`: Manages system-wide authenticator type enablement +- **Dependency Cleanup**: Removed unnecessary dependencies and ensured all required packages are properly declared +- **Threading Improvements**: Enhanced thread safety in the factory pattern with better lock management + +### Performance Enhancements + +- **Caching**: Authenticator instances are cached to improve performance +- **Validation**: Configuration validation happens before instance creation to prevent unnecessary object creation +- **Error Handling**: Improved error handling with proper exception propagation + +## Error Handling + +All authenticators return standardized `AuthResult` objects with: +- `success`: Boolean indicating success/failure +- `user_info`: User information on success +- `access_token`/`refresh_token`: OAuth tokens when applicable +- `error`: Error message on failure +- `error_code`: Machine-readable error code \ No newline at end of file diff --git a/wavefront/server/plugins/authenticator/authenticator/__init__.py b/wavefront/server/plugins/authenticator/authenticator/__init__.py new file mode 100644 index 00000000..d87f4e2d --- /dev/null +++ b/wavefront/server/plugins/authenticator/authenticator/__init__.py @@ -0,0 +1,27 @@ +from .factory import AuthenticatorFactory, get_authenticator_factory +from .types import ( + AuthenticatorType, + AuthResult, + TokenResult, + HealthStatus, + UserInfo, + AuthenticatorABC, +) + +from .email_password.config import EmailPasswordConfig +from .google_oauth.config import GoogleOAuthConfig +from .microsoft_oauth.config import MicrosoftOAuthConfig + +__all__ = [ + 'AuthenticatorFactory', + 'get_authenticator_factory', + 'AuthenticatorType', + 'AuthResult', + 'TokenResult', + 'HealthStatus', + 'UserInfo', + 
'AuthenticatorABC',
+    'EmailPasswordConfig',
+    'GoogleOAuthConfig',
+    'MicrosoftOAuthConfig',
+]
diff --git a/wavefront/server/plugins/authenticator/authenticator/email_password/__init__.py b/wavefront/server/plugins/authenticator/authenticator/email_password/__init__.py
new file mode 100644
index 00000000..c7247aa2
--- /dev/null
+++ b/wavefront/server/plugins/authenticator/authenticator/email_password/__init__.py
@@ -0,0 +1,4 @@
+from .authenticator import EmailPasswordAuthenticator
+from .config import EmailPasswordConfig
+
+__all__ = ['EmailPasswordAuthenticator', 'EmailPasswordConfig']
diff --git a/wavefront/server/plugins/authenticator/authenticator/email_password/authenticator.py b/wavefront/server/plugins/authenticator/authenticator/email_password/authenticator.py
new file mode 100644
index 00000000..76c77fbd
--- /dev/null
+++ b/wavefront/server/plugins/authenticator/authenticator/email_password/authenticator.py
@@ -0,0 +1,218 @@
+import time
+from datetime import datetime
+from typing import Dict, Any, Optional
+from collections import defaultdict
+
+from ..types import AuthenticatorABC, AuthResult, TokenResult, HealthStatus, UserInfo
+from .config import EmailPasswordConfig
+
+
+class EmailPasswordAuthenticator(AuthenticatorABC):
+    """Email and password authenticator implementation."""
+
+    def __init__(self, config: EmailPasswordConfig):
+        self.config = config
+        self.failed_attempts = defaultdict(list)  # Track failed login attempts
+
+    @staticmethod
+    def validate_config_static(config: Dict[str, Any]) -> bool:
+        """Validate email/password configuration without creating an instance."""
+        # Check password policy requirements
+        policy = config.get('password_policy', {})
+        required_fields = ['min_length', 'max_attempts', 'lockout_duration']
+
+        for field in required_fields:
+            if field not in policy:
+                raise ValueError(f'password_policy.{field} is required')
+
+        # Validate values
+        if policy['min_length'] < 6:
+            raise ValueError('password_policy.min_length must be at least 6')
+
+        if policy['max_attempts'] < 1:
+            raise ValueError('password_policy.max_attempts must be at least 1')
+
+        if policy['lockout_duration'] < 60:  # At least 1 minute
+            raise ValueError(
+                'password_policy.lockout_duration must be at least 60 seconds'
+            )
+
+        return True
+
+    # Not used directly yet: the database dependency still needs to be injected
+    def authenticate(self, credentials: Dict[str, Any]) -> AuthResult:
+        """
+        Authenticate user with email and password.
+
+        Args:
+            credentials: {"email": "user@example.com", "password": "secret"}
+
+        Returns:
+            AuthResult: Authentication result
+        """
+        email = credentials.get('email')
+        password = credentials.get('password')
+
+        if not email or not password:
+            return AuthResult(
+                success=False,
+                error='Email and password are required',
+                error_code='MISSING_CREDENTIALS',
+            )
+
+        # Check rate limiting
+        if self.config.rate_limit_enabled and self._is_rate_limited(email):
+            return AuthResult(
+                success=False,
+                error='Too many failed attempts. Please try again later.',
+                error_code='RATE_LIMITED',
+            )
+
+        # This would typically validate against a database; for now this is a
+        # placeholder implementation. The actual validation logic will be
+        # integrated with the existing user repository.
+
+        # Simulate password validation (to be replaced with actual DB validation)
+        if not self._validate_password_strength(password):
+            return AuthResult(
+                success=False,
+                error='Password does not meet security requirements',
+                error_code='WEAK_PASSWORD',
+            )
+
+        # Placeholder: the actual implementation will validate against the
+        # database via the existing user repository
+        user_info = UserInfo(
+            email=email,
+            first_name=email.split('@')[0],  # Placeholder name from email prefix
+            provider='email_password',
+        )
+
+        return AuthResult(success=True, user_info=user_info)
+
+    def validate_config(self) -> bool:
+        """Validate the email/password configuration."""
+        try:
+            # Check password policy requirements
+            policy = self.config.password_policy
+            required_fields = ['min_length', 'max_attempts', 'lockout_duration']
+
+            for field in required_fields:
+                if field not in policy:
+                    return False
+
+            # Validate values
+            if policy['min_length'] < 6:
+                return False
+
+            if policy['max_attempts'] < 1:
+                return False
+
+            if policy['lockout_duration'] < 60:  # At least 1 minute
+                return False
+
+            return True
+
+        except Exception:
+            return False
+
+    def get_authorization_url(self, state: Optional[str] = None) -> Optional[str]:
+        """Email/password doesn't need an authorization URL."""
+        return None
+
+    def handle_callback(self, callback_data: Dict[str, Any]) -> AuthResult:
+        """Email/password doesn't use OAuth callbacks."""
+        return AuthResult(
+            success=False,
+            error="Email/password authentication doesn't support callbacks",
+            error_code='NOT_SUPPORTED',
+        )
+
+    def refresh_token(self, refresh_token: str) -> TokenResult:
+        """Email/password doesn't use refresh tokens directly."""
+        return TokenResult(
+            success=False,
+            error="Email/password authentication doesn't support token refresh",
+        )
+
+    def logout(self, user_session: Dict[str, Any]) -> bool:
+        """Handle user logout for email/password authentication."""
+        # Clear any cached failed attempts for this user
+        user_id = user_session.get('user_id')
+        if user_id:
+            self.failed_attempts.pop(user_id, None)
+        return True
+
+    def get_health_status(self) -> HealthStatus:
+        """Get health status of email/password authenticator."""
+        return HealthStatus(
+            healthy=True,
+            message='Email/password authenticator is operational',
+            last_check=datetime.now(),
+            details={
+                'config_valid': self.validate_config(),
+                'rate_limiting_enabled': self.config.rate_limit_enabled,
+                'two_factor_enabled': self.config.two_factor_enabled,
+            },
+        )
+
+    def get_user_info(self, access_token: str) -> Optional[UserInfo]:
+        """Get user info - not applicable for email/password auth."""
+        return None
+
+    def _is_rate_limited(self, email: str) -> bool:
+        """Check if user is rate limited based on failed attempts."""
+        if not self.config.rate_limit_enabled:
+            return False
+
+        now = time.time()
+        max_attempts = self.config.password_policy['max_attempts']
+        lockout_duration = self.config.password_policy['lockout_duration']
+
+        # Clean old attempts
+        self.failed_attempts[email] = [
+            attempt_time
+            for attempt_time in self.failed_attempts[email]
+            if now - attempt_time < lockout_duration
+        ]
+
+        return len(self.failed_attempts[email]) >= max_attempts
+
+    def _record_failed_attempt(self, email: str) -> None:
+        """Record a failed login attempt."""
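+        # Timestamps of failures are stored per email; _is_rate_limited()
+        # prunes entries older than lockout_duration before counting them
+        # against max_attempts, giving a sliding-window lockout.
+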
if self.config.rate_limit_enabled: + self.failed_attempts[email].append(time.time()) + + def _validate_password_strength(self, password: str) -> bool: + """Validate password against configured policy.""" + policy = self.config.password_policy + + # Check minimum length + if len(password) < policy.get('min_length', 8): + return False + + # Check uppercase requirement + if policy.get('require_uppercase', False) and not any( + c.isupper() for c in password + ): + return False + + # Check lowercase requirement + if policy.get('require_lowercase', False) and not any( + c.islower() for c in password + ): + return False + + # Check number requirement + if policy.get('require_numbers', False) and not any( + c.isdigit() for c in password + ): + return False + + # Check special character requirement + if policy.get('require_special_chars', False): + special_chars = '!@#$%^&*(),.?":{}|<>' + if not any(c in special_chars for c in password): + return False + + return True diff --git a/wavefront/server/plugins/authenticator/authenticator/email_password/config.py b/wavefront/server/plugins/authenticator/authenticator/email_password/config.py new file mode 100644 index 00000000..484a061c --- /dev/null +++ b/wavefront/server/plugins/authenticator/authenticator/email_password/config.py @@ -0,0 +1,21 @@ +from dataclasses import dataclass, field +from typing import Dict, Any + + +@dataclass +class EmailPasswordConfig: + password_policy: Dict[str, Any] = field( + default_factory=lambda: { + 'min_length': 8, + 'require_uppercase': True, + 'require_lowercase': True, + 'require_numbers': True, + 'require_special_chars': False, + 'max_attempts': 5, + 'lockout_duration': 900, # 15 minutes + } + ) + two_factor_enabled: bool = False + password_reset_enabled: bool = True + session_timeout: int = 3600 # 1 hour + rate_limit_enabled: bool = True diff --git a/wavefront/server/plugins/authenticator/authenticator/factory.py b/wavefront/server/plugins/authenticator/authenticator/factory.py new file mode 100644 index 00000000..b33bd3c3 --- /dev/null +++ b/wavefront/server/plugins/authenticator/authenticator/factory.py @@ -0,0 +1,217 @@ +import threading +from typing import Dict, Optional + +from .types import AuthenticatorType, AuthenticatorABC +from .email_password import EmailPasswordAuthenticator +from .google_oauth import GoogleOAuthAuthenticator +from .microsoft_oauth import MicrosoftOAuthAuthenticator +from .email_password.config import EmailPasswordConfig +from .google_oauth.config import GoogleOAuthConfig +from .microsoft_oauth.config import MicrosoftOAuthConfig + + +class AuthenticatorFactory: + """Factory class for managing authenticator instances with caching and lifecycle management.""" + + _instance = None + _lock = threading.Lock() + + def __new__(cls): + """Singleton pattern for factory itself.""" + if cls._instance is None: + with cls._lock: + if cls._instance is None: + cls._instance = super(AuthenticatorFactory, cls).__new__(cls) + return cls._instance + + def __init__(self): + if not hasattr(self, '_initialized'): + self._google_instances: Dict[str, GoogleOAuthAuthenticator] = {} + self._microsoft_instances: Dict[str, MicrosoftOAuthAuthenticator] = {} + self._email_instances: Dict[str, EmailPasswordAuthenticator] = {} + self._instances_lock = threading.Lock() + self._initialized = True + + def get_authenticator( + self, auth_id: str, auth_type: AuthenticatorType, config: Dict[str, any] + ) -> AuthenticatorABC: + """ + Get or create an authenticator instance for the given auth_id. 
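+        Instances are cached per auth_id: repeated calls return the cached
+        object even if `config` has changed, so use update_authenticator()
+        to apply a new configuration.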
+ + Args: + auth_id: Unique ID for the authenticator instance + auth_type: Type of authenticator (GOOGLE_OAUTH, MICROSOFT_OAUTH, EMAIL_PASSWORD) + config: Configuration dictionary for the authenticator + + Returns: + AuthenticatorABC: The authenticator instance + + Raises: + ValueError: If auth_type is not supported + RuntimeError: If authenticator initialization fails + """ + with self._instances_lock: + instance_cache = self._get_cache_for_type(auth_type) + + # Check if instance already exists + if auth_id in instance_cache: + return instance_cache[auth_id] + + # Create new instance + authenticator = self._create_authenticator(auth_type, config) + instance_cache[auth_id] = authenticator + + return authenticator + + def validate_config( + self, auth_type: AuthenticatorType, config: Dict[str, any] + ) -> bool: + """ + Validate configuration using appropriate static validation method. + + Args: + auth_type: Type of authenticator + config: Configuration dictionary to validate + + Returns: + bool: True if configuration is valid + + Raises: + ValueError: If configuration is invalid with specific error message + """ + if auth_type == AuthenticatorType.EMAIL_PASSWORD: + return EmailPasswordAuthenticator.validate_config_static(config) + elif auth_type == AuthenticatorType.GOOGLE_OAUTH: + return GoogleOAuthAuthenticator.validate_config_static(config) + elif auth_type == AuthenticatorType.MICROSOFT_OAUTH: + return MicrosoftOAuthAuthenticator.validate_config_static(config) + else: + raise ValueError(f'Unsupported authenticator type: {auth_type}') + + def remove_authenticator(self, auth_id: str, auth_type: AuthenticatorType) -> bool: + """ + Remove an authenticator instance from the cache. + + Args: + auth_id: ID of the authenticator to remove + auth_type: Type of authenticator + + Returns: + bool: True if instance was removed, False if not found + """ + with self._instances_lock: + instance_cache = self._get_cache_for_type(auth_type) + + if auth_id in instance_cache: + del instance_cache[auth_id] + return True + + return False + + def update_authenticator( + self, auth_id: str, auth_type: AuthenticatorType, config: Dict[str, any] + ) -> AuthenticatorABC: + """ + Update an authenticator instance with new configuration. + This validates first, then removes the old instance and creates a new one. + + Args: + auth_id: ID of the authenticator to update + auth_type: Type of authenticator + config: New configuration dictionary + + Returns: + AuthenticatorABC: The updated authenticator instance + """ + + # Validate config BEFORE acquiring lock + self.validate_config(auth_type, config) + + with self._instances_lock: + # Remove old instance if exists + instance_cache = self._get_cache_for_type(auth_type) + if auth_id in instance_cache: + del instance_cache[auth_id] + + # Create new instance WITHOUT calling get_authenticator to avoid deadlock + authenticator = self._create_authenticator(auth_type, config) + instance_cache[auth_id] = authenticator + return authenticator + + def get_cached_instance_count( + self, auth_type: Optional[AuthenticatorType] = None + ) -> int: + """ + Get the number of cached instances for debugging/monitoring. 
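+        Counts are per process; each process holds its own factory singleton,
+        so the numbers are not shared across workers.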
+ + Args: + auth_type: Optional filter by auth_type + + Returns: + int: Number of cached instances + """ + with self._instances_lock: + if auth_type: + return len(self._get_cache_for_type(auth_type)) + + return ( + len(self._google_instances) + + len(self._microsoft_instances) + + len(self._email_instances) + ) + + def clear_all_instances(self) -> None: + """Clear all cached instances. Useful for testing or cleanup.""" + with self._instances_lock: + self._google_instances.clear() + self._microsoft_instances.clear() + self._email_instances.clear() + + def _get_cache_for_type( + self, auth_type: AuthenticatorType + ) -> Dict[str, AuthenticatorABC]: + """Get the appropriate instance cache for the given auth_type.""" + if auth_type == AuthenticatorType.GOOGLE_OAUTH: + return self._google_instances + elif auth_type == AuthenticatorType.MICROSOFT_OAUTH: + return self._microsoft_instances + elif auth_type == AuthenticatorType.EMAIL_PASSWORD: + return self._email_instances + else: + raise ValueError(f'Unsupported authenticator type: {auth_type}') + + def _create_authenticator( + self, auth_type: AuthenticatorType, config: Dict[str, any] + ) -> AuthenticatorABC: + """Create a new authenticator instance based on type and config.""" + if auth_type == AuthenticatorType.EMAIL_PASSWORD: + typed_config = EmailPasswordConfig(**config) + return EmailPasswordAuthenticator(typed_config) + + elif auth_type == AuthenticatorType.GOOGLE_OAUTH: + typed_config = GoogleOAuthConfig(**config) + return GoogleOAuthAuthenticator(typed_config) + + elif auth_type == AuthenticatorType.MICROSOFT_OAUTH: + typed_config = MicrosoftOAuthConfig(**config) + return MicrosoftOAuthAuthenticator(typed_config) + + else: + raise ValueError(f'Unsupported authenticator type: {auth_type}') + + +# Global factory instance +_factory_instance = None +_factory_lock = threading.Lock() + + +def get_authenticator_factory() -> AuthenticatorFactory: + """Get the global AuthenticatorFactory instance.""" + global _factory_instance + + if _factory_instance is None: + with _factory_lock: + if _factory_instance is None: + _factory_instance = AuthenticatorFactory() + + return _factory_instance diff --git a/wavefront/server/plugins/authenticator/authenticator/google_oauth/__init__.py b/wavefront/server/plugins/authenticator/authenticator/google_oauth/__init__.py new file mode 100644 index 00000000..241a224f --- /dev/null +++ b/wavefront/server/plugins/authenticator/authenticator/google_oauth/__init__.py @@ -0,0 +1,4 @@ +from .authenticator import GoogleOAuthAuthenticator +from .config import GoogleOAuthConfig + +__all__ = ['GoogleOAuthAuthenticator', 'GoogleOAuthConfig'] diff --git a/wavefront/server/plugins/authenticator/authenticator/google_oauth/authenticator.py b/wavefront/server/plugins/authenticator/authenticator/google_oauth/authenticator.py new file mode 100644 index 00000000..5c4d5b6d --- /dev/null +++ b/wavefront/server/plugins/authenticator/authenticator/google_oauth/authenticator.py @@ -0,0 +1,317 @@ +import json +import requests +from datetime import datetime +from typing import Dict, Any, Optional +from urllib.parse import urlencode, urlparse + +from ..types import AuthenticatorABC, AuthResult, TokenResult, HealthStatus, UserInfo +from .config import GoogleOAuthConfig + + +class GoogleOAuthAuthenticator(AuthenticatorABC): + """Google OAuth 2.0 authenticator implementation.""" + + def __init__(self, config: GoogleOAuthConfig): + self.config = config + self.auth_url = 'https://accounts.google.com/o/oauth2/v2/auth' + self.token_url = 
'https://oauth2.googleapis.com/token' + self.userinfo_url = 'https://www.googleapis.com/oauth2/v2/userinfo' + + @staticmethod + def validate_config_static(config: Dict[str, Any]) -> bool: + """Validate Google OAuth configuration without creating an instance.""" + # Check required fields + required_fields = [ + 'client_id', + 'client_secret', + 'redirect_uri', + 'client_redirect_success_url', + 'client_redirect_failure_url', + 'scopes', + ] + for field in required_fields: + if not config.get(field): + raise ValueError(f'{field} is required') + + # Validate redirect URI format + parsed_uri = urlparse(config['redirect_uri']) + if not parsed_uri.scheme or not parsed_uri.netloc: + raise ValueError('redirect_uri must be a valid URL with scheme and netloc') + + # Validate client redirect URLs + for url_field in ['client_redirect_success_url', 'client_redirect_failure_url']: + parsed_url = urlparse(config[url_field]) + if not parsed_url.scheme or not parsed_url.netloc: + raise ValueError( + f'{url_field} must be a valid URL with scheme and netloc' + ) + + # Validate scopes + scopes = config.get('scopes', []) + if not scopes or len(scopes) == 0: + raise ValueError('scopes array cannot be empty') + + return True + + def authenticate(self, credentials: Dict[str, Any]) -> AuthResult: + """ + Authenticate user with Google OAuth. + + Args: + credentials: {"authorization_code": "code", "state": "state"} + + Returns: + AuthResult: Authentication result + """ + authorization_code = credentials.get('authorization_code') + # state = credentials.get('state') + + if not authorization_code: + return AuthResult( + success=False, + error='Authorization code is required', + error_code='MISSING_AUTH_CODE', + ) + + # Exchange authorization code for access token + token_result = self._exchange_code_for_token(authorization_code) + + if not token_result.success: + return AuthResult( + success=False, + error=token_result.error, + error_code='TOKEN_EXCHANGE_FAILED', + ) + + # Get user info from Google + user_info = self._get_user_info_from_google(token_result.access_token) + + if not user_info: + return AuthResult( + success=False, + error='Failed to retrieve user information from Google', + error_code='USER_INFO_FAILED', + ) + + # Check hosted domain restriction if configured + if self.config.hosted_domain: + user_domain = ( + user_info.email.split('@')[1] if '@' in user_info.email else None + ) + if user_domain != self.config.hosted_domain: + return AuthResult( + success=False, + error=f'Email domain {user_domain} is not allowed', + error_code='DOMAIN_NOT_ALLOWED', + ) + + return AuthResult( + success=True, + user_info=user_info, + access_token=token_result.access_token, + refresh_token=token_result.refresh_token, + ) + + def validate_config(self) -> bool: + """Validate the Google OAuth configuration.""" + try: + # Check required fields + required_fields = [ + 'client_id', + 'client_secret', + 'redirect_uri', + 'client_redirect_success_url', + 'client_redirect_failure_url', + 'scopes', + ] + for field in required_fields: + if not getattr(self.config, field, None): + return False + + # Validate redirect URI format + parsed_uri = urlparse(self.config.redirect_uri) + if not parsed_uri.scheme or not parsed_uri.netloc: + return False + + # Validate client redirect URLs + parsed_url = urlparse(self.config.client_redirect_success_url) + if not parsed_url.scheme or not parsed_url.netloc: + return False + + parsed_url = urlparse(self.config.client_redirect_failure_url) + if not parsed_url.scheme or not parsed_url.netloc: + 
return False + + # Validate scopes + if not self.config.scopes or len(self.config.scopes) == 0: + return False + + return True + + except Exception: + return False + + def get_authorization_url(self, state: Optional[str] = None) -> Optional[str]: + """Get the Google OAuth authorization URL.""" + if not state: + raise ValueError("State doesn't exist Google Oauth") + + state_obj = json.loads(state) + + if state_obj['auth_id'] is None: + raise ValueError("Auth Id doesn't exist in Google Oauth state") + + params = { + 'response_type': 'code', + 'client_id': self.config.client_id, + 'redirect_uri': self.config.redirect_uri, + 'scope': ' '.join(self.config.scopes), + 'state': state, + 'access_type': self.config.access_type, + 'prompt': self.config.prompt, + } + + if self.config.hosted_domain: + params['hd'] = self.config.hosted_domain + + return f'{self.auth_url}?{urlencode(params)}' + + def handle_callback(self, callback_data: Dict[str, Any]) -> AuthResult: + """Handle Google OAuth callback.""" + return self.authenticate(callback_data) + + def refresh_token(self, refresh_token: str) -> TokenResult: + """Refresh Google OAuth access token.""" + if not refresh_token: + return TokenResult(success=False, error='Refresh token is required') + + data = { + 'grant_type': 'refresh_token', + 'refresh_token': refresh_token, + 'client_id': self.config.client_id, + 'client_secret': self.config.client_secret, + } + + try: + response = requests.post(self.token_url, data=data, timeout=10) + response.raise_for_status() + + token_data = response.json() + + return TokenResult( + success=True, + access_token=token_data.get('access_token'), + refresh_token=token_data.get( + 'refresh_token', refresh_token + ), # Keep old if not returned + expires_in=token_data.get('expires_in'), + ) + + except requests.exceptions.RequestException as e: + return TokenResult(success=False, error=f'Token refresh failed: {str(e)}') + except json.JSONDecodeError: + return TokenResult( + success=False, error='Invalid response from Google token endpoint' + ) + + def logout(self, user_session: Dict[str, Any]) -> bool: + """Handle user logout for Google OAuth.""" + # Optionally revoke Google tokens + access_token = user_session.get('access_token') + if access_token: + try: + revoke_url = ( + f'https://oauth2.googleapis.com/revoke?token={access_token}' + ) + requests.post(revoke_url, timeout=5) + except Exception: + pass # Non-critical if revocation fails + + return True + + def get_health_status(self) -> HealthStatus: + """Get health status of Google OAuth authenticator.""" + is_healthy = True + details = { + 'config_valid': self.validate_config(), + 'hosted_domain': self.config.hosted_domain, + 'scopes': self.config.scopes, + } + + # Test Google OAuth endpoints connectivity + try: + response = requests.get('https://www.googleapis.com', timeout=5) + details['google_api_reachable'] = response.status_code == 200 + except Exception: + details['google_api_reachable'] = False + is_healthy = False + + return HealthStatus( + healthy=is_healthy, + message='Google OAuth authenticator is operational' + if is_healthy + else 'Google APIs unreachable', + last_check=datetime.now(), + details=details, + ) + + def get_user_info(self, access_token: str) -> Optional[UserInfo]: + """Get user information from Google using access token.""" + return self._get_user_info_from_google(access_token) + + def _exchange_code_for_token(self, authorization_code: str) -> TokenResult: + """Exchange authorization code for access token.""" + data = { + 'grant_type': 
'authorization_code',
+            'code': authorization_code,
+            'client_id': self.config.client_id,
+            'client_secret': self.config.client_secret,
+            'redirect_uri': self.config.redirect_uri,
+        }
+
+        try:
+            response = requests.post(self.token_url, data=data, timeout=10)
+            response.raise_for_status()
+
+            token_data = response.json()
+
+            return TokenResult(
+                success=True,
+                access_token=token_data.get('access_token'),
+                refresh_token=token_data.get('refresh_token'),
+                expires_in=token_data.get('expires_in'),
+            )
+
+        except requests.exceptions.RequestException as e:
+            return TokenResult(success=False, error=f'Token exchange failed: {str(e)}')
+        except json.JSONDecodeError:
+            return TokenResult(
+                success=False, error='Invalid response from Google token endpoint'
+            )
+
+    def _get_user_info_from_google(self, access_token: str) -> Optional[UserInfo]:
+        """Get user information from Google API."""
+        try:
+            headers = {'Authorization': f'Bearer {access_token}'}
+            response = requests.get(self.userinfo_url, headers=headers, timeout=10)
+            response.raise_for_status()
+
+            user_data = response.json()
+
+            # Google's userinfo payload exposes given_name/family_name
+            return UserInfo(
+                email=user_data.get('email'),
+                first_name=user_data.get('given_name'),
+                last_name=user_data.get('family_name'),
+                user_id=user_data.get('id'),
+                provider='google',
+                avatar_url=user_data.get('picture'),
+                additional_info={
+                    'given_name': user_data.get('given_name'),
+                    'family_name': user_data.get('family_name'),
+                    'locale': user_data.get('locale'),
+                    'verified_email': user_data.get('verified_email'),
+                },
+            )
+
+        except Exception:
+            return None
diff --git a/wavefront/server/plugins/authenticator/authenticator/google_oauth/config.py b/wavefront/server/plugins/authenticator/authenticator/google_oauth/config.py
new file mode 100644
index 00000000..480f655f
--- /dev/null
+++ b/wavefront/server/plugins/authenticator/authenticator/google_oauth/config.py
@@ -0,0 +1,15 @@
+from dataclasses import dataclass, field
+from typing import Optional
+
+
+@dataclass
+class GoogleOAuthConfig:
+    client_id: str
+    client_secret: str
+    redirect_uri: str
+    client_redirect_success_url: str
+    client_redirect_failure_url: str
+    scopes: list[str] = field(default_factory=lambda: ['openid', 'email', 'profile'])
+    hosted_domain: Optional[str] = None  # Restrict to specific domain
+    access_type: str = 'offline'  # To get refresh token
+    prompt: str = 'consent'  # To ensure refresh token is always returned
diff --git a/wavefront/server/plugins/authenticator/authenticator/helper.py b/wavefront/server/plugins/authenticator/authenticator/helper.py
new file mode 100644
index 00000000..cdc96e02
--- /dev/null
+++ b/wavefront/server/plugins/authenticator/authenticator/helper.py
@@ -0,0 +1,144 @@
+from typing import Dict, Any, Optional, Union
+from datetime import datetime
+import json
+
+from .types import AuthenticatorType, UserInfo, AuthResult
+
+
+def validate_email(email: str) -> bool:
+    """Validate email format."""
+    if not email or '@' not in email:
+        return False
+
+    parts = email.split('@')
+    if len(parts) != 2:
+        return False
+
+    local, domain = parts
+    if not local or not domain:
+        return False
+
+    # Basic domain validation
+    if '.' not in domain:
+        return False
+
+    return True
+
+
+def parse_authenticator_config(
+    config_json: Union[str, Dict[str, Any]],
+) -> Dict[str, Any]:
+    """Parse authenticator configuration from JSON or dict."""
+    if isinstance(config_json, str):
+        try:
+            return json.loads(config_json)
+        except json.JSONDecodeError:
+            return {}
+    elif isinstance(config_json, dict):
+        return config_json
+    else:
+        return {}
+
+
+def create_error_response(
+    message: str, error_code: str = 'UNKNOWN_ERROR'
+) -> AuthResult:
+    """Create a standardized error response."""
+    return AuthResult(success=False, error=message, error_code=error_code)
+
+
+def create_success_response(
+    user_info: UserInfo,
+    access_token: Optional[str] = None,
+    refresh_token: Optional[str] = None,
+) -> AuthResult:
+    """Create a standardized success response."""
+    return AuthResult(
+        success=True,
+        user_info=user_info,
+        access_token=access_token,
+        refresh_token=refresh_token,
+    )
+
+
+def normalize_user_info(user_data: Dict[str, Any], provider: str) -> UserInfo:
+    """Normalize user information from different providers."""
+    # Extract common fields
+    email = (
+        user_data.get('email')
+        or user_data.get('mail')
+        or user_data.get('userPrincipalName')
+    )
+    given_name = user_data.get('given_name') or user_data.get('givenName')
+    family_name = user_data.get('family_name') or user_data.get('surname')
+
+    # Fall back to the display name, then the email prefix, for the first name
+    first_name = (
+        given_name or user_data.get('name') or user_data.get('displayName')
+    )
+    if not first_name and email:
+        first_name = email.split('@')[0]
+
+    return UserInfo(
+        email=email,
+        first_name=first_name,
+        last_name=family_name,
+        user_id=user_data.get('id') or user_data.get('sub'),
+        provider=provider,
+        avatar_url=user_data.get('picture') or user_data.get('avatar_url'),
+        additional_info=user_data,
+    )
+
+
+def get_authenticator_display_name(auth_type: AuthenticatorType) -> str:
+    """Get human-readable display name for authenticator type."""
+    display_names = {
+        AuthenticatorType.EMAIL_PASSWORD: 'Email & Password',
+        AuthenticatorType.GOOGLE_OAUTH: 'Google OAuth',
+        AuthenticatorType.MICROSOFT_OAUTH: 'Microsoft OAuth',
+        AuthenticatorType.SAML: 'SAML',
+        AuthenticatorType.LDAP: 'LDAP',
+    }
+    return display_names.get(auth_type, str(auth_type))
+
+
+def is_oauth_provider(auth_type: AuthenticatorType) -> bool:
+    """Check if authenticator type is an OAuth provider."""
+    oauth_types = {AuthenticatorType.GOOGLE_OAUTH, AuthenticatorType.MICROSOFT_OAUTH}
+    return auth_type in oauth_types
+
+
+def extract_domain_from_email(email: str) -> Optional[str]:
+    """Extract domain from email address."""
+    if not email or '@' not in email:
+        return None
+    return email.split('@')[1]
+
+
+def format_scopes(scopes: list[str]) -> str:
+    """Format scopes list for OAuth requests."""
+    if not scopes:
+        return ''
+    return ' '.join(scopes)
+
+
+def log_authentication_attempt(
+    auth_type: AuthenticatorType,
+    email: str,
+    success: bool,
+    error_code: Optional[str] = None,
+) -> Dict[str, Any]:
+    """Create authentication log entry."""
+    return {
+        'timestamp': datetime.now().isoformat(),
+        'auth_type': str(auth_type),
+        'email': email,
+        'success': success,
+        'error_code': error_code,
+    }
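A quick sketch of how `normalize_user_info` maps a provider payload into `UserInfo` (the field names mirror Google's userinfo response; the values are illustrative):

```python
from authenticator.helper import normalize_user_info

payload = {
    'id': '1234567890',
    'email': 'ada@example.com',
    'given_name': 'Ada',
    'family_name': 'Lovelace',
    'picture': 'https://example.com/avatar.png',
}

user = normalize_user_info(payload, provider='google')
print(user.first_name, user.last_name, user.provider)  # Ada Lovelace google
```

diff --git a/wavefront/server/plugins/authenticator/authenticator/microsoft_oauth/__init__.py b/wavefront/server/plugins/authenticator/authenticator/microsoft_oauth/__init__.py
new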
file mode 100644 index 00000000..a742ada7 --- /dev/null +++ b/wavefront/server/plugins/authenticator/authenticator/microsoft_oauth/__init__.py @@ -0,0 +1,4 @@ +from .authenticator import MicrosoftOAuthAuthenticator +from .config import MicrosoftOAuthConfig + +__all__ = ['MicrosoftOAuthAuthenticator', 'MicrosoftOAuthConfig'] diff --git a/wavefront/server/plugins/authenticator/authenticator/microsoft_oauth/authenticator.py b/wavefront/server/plugins/authenticator/authenticator/microsoft_oauth/authenticator.py new file mode 100644 index 00000000..eca09d1b --- /dev/null +++ b/wavefront/server/plugins/authenticator/authenticator/microsoft_oauth/authenticator.py @@ -0,0 +1,311 @@ +import json +import requests +from datetime import datetime +from typing import Dict, Any, Optional +from urllib.parse import urlencode, urlparse + +from ..types import AuthenticatorABC, AuthResult, TokenResult, HealthStatus, UserInfo +from .config import MicrosoftOAuthConfig + + +class MicrosoftOAuthAuthenticator(AuthenticatorABC): + """Microsoft OAuth 2.0 authenticator implementation.""" + + def __init__(self, config: MicrosoftOAuthConfig): + self.config = config + self.auth_url = f'{config.authority}{config.tenant_id}/oauth2/v2.0/authorize' + self.token_url = f'{config.authority}{config.tenant_id}/oauth2/v2.0/token' + self.graph_url = 'https://graph.microsoft.com/v1.0/me' + + @staticmethod + def validate_config_static(config: Dict[str, Any]) -> bool: + """Validate Microsoft OAuth configuration without creating an instance.""" + # Check required fields + required_fields = [ + 'client_id', + 'client_secret', + 'tenant_id', + 'redirect_uri', + 'client_redirect_success_url', + 'client_redirect_failure_url', + 'scopes', + ] + for field in required_fields: + if not config.get(field): + raise ValueError(f'{field} is required') + + # Validate redirect URI format + parsed_uri = urlparse(config['redirect_uri']) + if not parsed_uri.scheme or not parsed_uri.netloc: + raise ValueError('redirect_uri must be a valid URL with scheme and netloc') + + # Validate client redirect URLs + for url_field in ['client_redirect_success_url', 'client_redirect_failure_url']: + parsed_url = urlparse(config[url_field]) + if not parsed_url.scheme or not parsed_url.netloc: + raise ValueError( + f'{url_field} must be a valid URL with scheme and netloc' + ) + + # Validate scopes + scopes = config.get('scopes', []) + if not scopes or len(scopes) == 0: + raise ValueError('scopes array cannot be empty') + + # Validate authority URL + authority = config.get('authority') + if authority and not authority.startswith('https://'): + raise ValueError('authority must be a valid HTTPS URL') + + return True + + def authenticate(self, credentials: Dict[str, Any]) -> AuthResult: + """ + Authenticate user with Microsoft OAuth. 
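+        The authorization_code must come from the redirect triggered by
+        get_authorization_url(); it is exchanged at the tenant's token
+        endpoint, and the access token is then used to fetch the profile
+        from Microsoft Graph.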
+ + Args: + credentials: {"authorization_code": "code", "state": "state"} + + Returns: + AuthResult: Authentication result + """ + authorization_code = credentials.get('authorization_code') + # state = credentials.get('state') + + if not authorization_code: + return AuthResult( + success=False, + error='Authorization code is required', + error_code='MISSING_AUTH_CODE', + ) + + # Exchange authorization code for access token + token_result = self._exchange_code_for_token(authorization_code) + + if not token_result.success: + return AuthResult( + success=False, + error=token_result.error, + error_code='TOKEN_EXCHANGE_FAILED', + ) + + # Get user info from Microsoft Graph + user_info = self._get_user_info_from_graph(token_result.access_token) + + if not user_info: + return AuthResult( + success=False, + error='Failed to retrieve user information from Microsoft Graph', + error_code='USER_INFO_FAILED', + ) + + return AuthResult( + success=True, + user_info=user_info, + access_token=token_result.access_token, + refresh_token=token_result.refresh_token, + ) + + def validate_config(self) -> bool: + """Validate the Microsoft OAuth configuration.""" + try: + # Check required fields + required_fields = [ + 'client_id', + 'client_secret', + 'tenant_id', + 'redirect_uri', + 'client_redirect_success_url', + 'client_redirect_failure_url', + 'scopes', + ] + for field in required_fields: + if not getattr(self.config, field, None): + return False + + # Validate redirect URI format + parsed_uri = urlparse(self.config.redirect_uri) + if not parsed_uri.scheme or not parsed_uri.netloc: + return False + + # Validate client redirect URLs + parsed_url = urlparse(self.config.client_redirect_success_url) + if not parsed_url.scheme or not parsed_url.netloc: + return False + + parsed_url = urlparse(self.config.client_redirect_failure_url) + if not parsed_url.scheme or not parsed_url.netloc: + return False + + # Validate scopes + if not self.config.scopes or len(self.config.scopes) == 0: + return False + + # Validate authority URL + if not self.config.authority.startswith('https://'): + return False + + return True + + except Exception: + return False + + def get_authorization_url(self, state: Optional[str] = None) -> Optional[str]: + """Get the Microsoft OAuth authorization URL.""" + if not state: + raise ValueError("State doesn't exist Microsoft Oauth") + + state_obj = json.loads(state) + + if state_obj['auth_id'] is None: + raise ValueError("Auth Id doesn't exist in Microsoft Oauth state") + + params = { + 'response_type': self.config.response_type, + 'client_id': self.config.client_id, + 'redirect_uri': self.config.redirect_uri, + 'scope': ' '.join(self.config.scopes), + 'state': state, + 'response_mode': self.config.response_mode, + } + + return f'{self.auth_url}?{urlencode(params)}' + + def handle_callback(self, callback_data: Dict[str, Any]) -> AuthResult: + """Handle Microsoft OAuth callback.""" + return self.authenticate(callback_data) + + def refresh_token(self, refresh_token: str) -> TokenResult: + """Refresh Microsoft OAuth access token.""" + if not refresh_token: + return TokenResult(success=False, error='Refresh token is required') + + data = { + 'grant_type': 'refresh_token', + 'refresh_token': refresh_token, + 'client_id': self.config.client_id, + 'client_secret': self.config.client_secret, + 'scope': ' '.join(self.config.scopes), + } + + try: + response = requests.post(self.token_url, data=data, timeout=10) + response.raise_for_status() + + token_data = response.json() + + return TokenResult( + 
success=True, + access_token=token_data.get('access_token'), + refresh_token=token_data.get( + 'refresh_token', refresh_token + ), # Keep old if not returned + expires_in=token_data.get('expires_in'), + ) + + except requests.exceptions.RequestException as e: + return TokenResult(success=False, error=f'Token refresh failed: {str(e)}') + except json.JSONDecodeError: + return TokenResult( + success=False, error='Invalid response from Microsoft token endpoint' + ) + + def logout(self, user_session: Dict[str, Any]) -> bool: + """Handle user logout for Microsoft OAuth.""" + # Microsoft doesn't have a simple token revocation endpoint like Google + # The tokens will expire naturally, but we can log the logout + return True + + def get_health_status(self) -> HealthStatus: + """Get health status of Microsoft OAuth authenticator.""" + is_healthy = True + details = { + 'config_valid': self.validate_config(), + 'tenant_id': self.config.tenant_id, + 'scopes': self.config.scopes, + } + + # Test Microsoft Graph API connectivity + try: + response = requests.get('https://graph.microsoft.com', timeout=5) + details['graph_api_reachable'] = response.status_code == 200 + except Exception: + details['graph_api_reachable'] = False + is_healthy = False + + return HealthStatus( + healthy=is_healthy, + message='Microsoft OAuth authenticator is operational' + if is_healthy + else 'Microsoft Graph API unreachable', + last_check=datetime.now(), + details=details, + ) + + def get_user_info(self, access_token: str) -> Optional[UserInfo]: + """Get user information from Microsoft Graph using access token.""" + return self._get_user_info_from_graph(access_token) + + def _exchange_code_for_token(self, authorization_code: str) -> TokenResult: + """Exchange authorization code for access token.""" + data = { + 'grant_type': 'authorization_code', + 'code': authorization_code, + 'client_id': self.config.client_id, + 'client_secret': self.config.client_secret, + 'redirect_uri': self.config.redirect_uri, + 'scope': ' '.join(self.config.scopes), + } + + try: + response = requests.post(self.token_url, data=data, timeout=10) + response.raise_for_status() + + token_data = response.json() + + return TokenResult( + success=True, + access_token=token_data.get('access_token'), + refresh_token=token_data.get('refresh_token'), + expires_in=token_data.get('expires_in'), + ) + + except requests.exceptions.RequestException as e: + return TokenResult(success=False, error=f'Token exchange failed: {str(e)}') + except json.JSONDecodeError: + return TokenResult( + success=False, error='Invalid response from Microsoft token endpoint' + ) + + def _get_user_info_from_graph(self, access_token: str) -> Optional[UserInfo]: + """Get user information from Microsoft Graph API.""" + try: + headers = {'Authorization': f'Bearer {access_token}'} + response = requests.get(self.graph_url, headers=headers, timeout=10) + response.raise_for_status() + + user_data = response.json() + + mail = user_data.get('mail') or user_data.get('userPrincipalName') + + return UserInfo( + email=mail, + first_name=( + user_data.get('givenName') + or (mail.split('@')[0] if mail and '@' in mail else None) + ), + last_name=user_data.get('surname'), + user_id=user_data.get('id'), + provider='microsoft', + avatar_url=None, # Microsoft Graph doesn't provide avatar URL directly + additional_info={ + 'display_name': user_data.get('displayName'), + 'job_title': user_data.get('jobTitle'), + 'office_location': user_data.get('officeLocation'), + 'preferred_language': 
user_data.get('preferredLanguage'), + 'user_principal_name': user_data.get('userPrincipalName'), + }, + ) + + except Exception: + return None diff --git a/wavefront/server/plugins/authenticator/authenticator/microsoft_oauth/config.py b/wavefront/server/plugins/authenticator/authenticator/microsoft_oauth/config.py new file mode 100644 index 00000000..78e60e2f --- /dev/null +++ b/wavefront/server/plugins/authenticator/authenticator/microsoft_oauth/config.py @@ -0,0 +1,17 @@ +from dataclasses import dataclass, field + + +@dataclass +class MicrosoftOAuthConfig: + client_id: str + client_secret: str + tenant_id: str + redirect_uri: str + client_redirect_success_url: str + client_redirect_failure_url: str + scopes: list[str] = field( + default_factory=lambda: ['openid', 'email', 'profile', 'User.Read'] + ) + authority: str = 'https://login.microsoftonline.com/' + response_type: str = 'code' + response_mode: str = 'query' diff --git a/wavefront/server/plugins/authenticator/authenticator/types.py b/wavefront/server/plugins/authenticator/authenticator/types.py new file mode 100644 index 00000000..2dafd706 --- /dev/null +++ b/wavefront/server/plugins/authenticator/authenticator/types.py @@ -0,0 +1,182 @@ +from enum import Enum +from abc import ABC, abstractmethod +from typing import Any, Generic, TypeVar, Dict, Optional +from dataclasses import dataclass +from datetime import datetime + + +@dataclass +class Meta: + status: str + message: str + code: int + + +T = TypeVar('T') + + +@dataclass +class AuthenticatorResult(Generic[T]): + meta: Meta + result: T + + +@dataclass +class UserInfo: + email: str + first_name: str + last_name: Optional[str] = None + user_id: Optional[str] = None + provider: Optional[str] = None + avatar_url: Optional[str] = None + additional_info: Optional[Dict[str, Any]] = None + + +@dataclass +class AuthResult: + success: bool + user_info: Optional[UserInfo] = None + access_token: Optional[str] = None + refresh_token: Optional[str] = None + error: Optional[str] = None + error_code: Optional[str] = None + redirect_url: Optional[str] = None + + +@dataclass +class TokenResult: + success: bool + access_token: Optional[str] = None + refresh_token: Optional[str] = None + expires_in: Optional[int] = None + error: Optional[str] = None + + +@dataclass +class HealthStatus: + healthy: bool + message: str + last_check: datetime + details: Optional[Dict[str, Any]] = None + + +# Result type aliases +BooleanResult = AuthenticatorResult[bool] +AuthenticationResult = AuthenticatorResult[AuthResult] +TokenRefreshResult = AuthenticatorResult[TokenResult] +HealthCheckResult = AuthenticatorResult[HealthStatus] + + +class AuthenticatorType(Enum): + EMAIL_PASSWORD = 'email_password' + GOOGLE_OAUTH = 'google_oauth' + MICROSOFT_OAUTH = 'microsoft_oauth' + SAML = 'saml' + LDAP = 'ldap' + + def __str__(self): + return self.value + + +class AuthenticatorABC(ABC): + """Abstract base class for all authenticator implementations.""" + + @abstractmethod + def authenticate(self, credentials: Dict[str, Any]) -> AuthResult: + """ + Authenticate user with provided credentials. + + Args: + credentials: Dictionary containing authentication data + - For email_password: {"email": "user@example.com", "password": "secret"} + - For OAuth: {"authorization_code": "code", "state": "state"} + + Returns: + AuthResult: Authentication result with user info and tokens + """ + pass + + @abstractmethod + def validate_config(self) -> bool: + """ + Validate the authenticator configuration. 
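+        Implementations should return False for an invalid configuration
+        rather than raising; the concrete authenticators also provide a
+        raising validate_config_static() for vetting configs before
+        instantiation.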
+ + Returns: + bool: True if configuration is valid, False otherwise + """ + pass + + @abstractmethod + def get_authorization_url(self, state: Optional[str] = None) -> Optional[str]: + """ + Get the authorization URL for OAuth flow. + + Args: + state: Optional state parameter for OAuth flow + + Returns: + Optional[str]: Authorization URL for OAuth providers, None for email/password + """ + pass + + @abstractmethod + def handle_callback(self, callback_data: Dict[str, Any]) -> AuthResult: + """ + Handle OAuth callback from provider. + + Args: + callback_data: Dictionary containing callback data (code, state, etc.) + + Returns: + AuthResult: Authentication result + """ + pass + + @abstractmethod + def refresh_token(self, refresh_token: str) -> TokenResult: + """ + Refresh access token using refresh token. + + Args: + refresh_token: Refresh token from previous authentication + + Returns: + TokenResult: Token refresh result + """ + pass + + @abstractmethod + def logout(self, user_session: Dict[str, Any]) -> bool: + """ + Handle user logout. + + Args: + user_session: Current user session data + + Returns: + bool: True if logout successful, False otherwise + """ + pass + + @abstractmethod + def get_health_status(self) -> HealthStatus: + """ + Get the health status of the authenticator. + + Returns: + HealthStatus: Current health status + """ + pass + + @abstractmethod + def get_user_info(self, access_token: str) -> Optional[UserInfo]: + """ + Get user information using access token. + + Args: + access_token: Valid access token + + Returns: + Optional[UserInfo]: User information or None if failed + """ + pass diff --git a/wavefront/server/plugins/authenticator/pyproject.toml b/wavefront/server/plugins/authenticator/pyproject.toml new file mode 100644 index 00000000..26c57bc9 --- /dev/null +++ b/wavefront/server/plugins/authenticator/pyproject.toml @@ -0,0 +1,24 @@ +[project] +name = "authenticator" +version = "0.1.0" +description = "Authenticator plugin" +readme = "README.md" +requires-python = ">=3.11" + +dependencies = [ + "requests>=2.25.0", +] + +[tool.pytest.ini_options] +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" + +[tool.uv] +package = true + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["authenticator"] diff --git a/wavefront/server/plugins/datasource/README.md b/wavefront/server/plugins/datasource/README.md new file mode 100644 index 00000000..e69de29b diff --git a/wavefront/server/plugins/datasource/datasource/__init__.py b/wavefront/server/plugins/datasource/datasource/__init__.py new file mode 100644 index 00000000..6ebb9f23 --- /dev/null +++ b/wavefront/server/plugins/datasource/datasource/__init__.py @@ -0,0 +1,122 @@ +from .types import ( + BooleanResult, + DataSourceABC, + DataSourceType, + TableListResult, + QueryResult, +) +from typing import Any, Optional, List, Dict + +from .bigquery import BigQueryPlugin, BigQueryConfig +from .redshift import RedshiftPlugin, RedshiftConfig +from .helper import construct_meta +from .odata_parser import ODataQueryParser + + +class DatasourcePlugin(DataSourceABC): + def __init__( + self, + datasource_type: DataSourceType, + config: BigQueryConfig | RedshiftConfig, + ): + self.datasource_type = datasource_type + self.config = config + self.datasource = self.__get_datasource() + + def __get_datasource(self) -> DataSourceABC: + if self.datasource_type == DataSourceType.AWS_REDSHIFT: + self.odata_parser = ODataQueryParser(type='sql', 
dynamic_var_char=':') + if not isinstance(self.config, RedshiftConfig): + raise ValueError(f'Invalid config type: {type(self.config)}') + return RedshiftPlugin(self.config) + elif self.datasource_type == DataSourceType.GCP_BIGQUERY: + self.odata_parser = ODataQueryParser(type='sql', dynamic_var_char='@') + if not isinstance(self.config, BigQueryConfig): + raise ValueError(f'Invalid config type: {type(self.config)}') + return BigQueryPlugin(self.config) + else: + raise ValueError(f'Invalid datasource type: {self.datasource_type}') + + async def test_connection(self) -> BooleanResult: + return BooleanResult( + result=await self.datasource.test_connection(), + meta=construct_meta(status='success', code=1), + ) + + def get_schema(self) -> dict: + return self.datasource.get_schema() + + def get_table_names(self, **kwargs) -> TableListResult: + result = self.datasource.get_table_names(**kwargs) + return TableListResult( + result=result, meta=construct_meta(status='success', code=1) + ) + + def fetch_data( + self, + table_name: str, + projection: Optional[str] = '*', + filter: Optional[str] = None, + join: Optional[str] = None, + offset: Optional[int] = 0, + limit: Optional[int] = 10, + order_by: Optional[str] = None, + group_by: Optional[str] = None, + ) -> QueryResult: + where_clause, params = self.odata_parser.prepare_odata_filter(filter) + join_query, table_aliases, join_where_clause, join_params = ( + self.odata_parser.prepare_odata_joins(join, table_name) + ) + + where_clause = where_clause if where_clause else 'true' + if join_where_clause: + where_clause = f'{where_clause} AND {join_where_clause}' + params = (params if params else {}) | join_params + + result = self.datasource.fetch_data( + table_names=[table_name] + table_aliases, + projection=projection, + where_clause=where_clause, + join_query=join_query if join_query else None, + params=params, + offset=offset, + limit=limit, + order_by=order_by, + group_by=group_by, + ) + return QueryResult(result=result, meta=construct_meta(status='success', code=1)) + + def insert_rows_json(self, table_name: str, data: List[Dict[str, Any]]): + return self.datasource.insert_rows_json(table_name, data) + + async def execute_query( + self, query: str, use_legacy_sql: bool = False, dry_run: bool = False, **kwargs + ) -> Any: + return await self.datasource.execute_query( + query, use_legacy_sql, dry_run, **kwargs + ) + + async def execute_dynamic_query( + self, + query: List[Dict[str, Any]], + rls_filter: Optional[str] = None, + filter: Optional[str] = None, + offset: Optional[int] = 0, + limit: Optional[int] = 100, + params: Optional[Dict[str, Any]] = None, + ): + odata_filter, odata_params = self.odata_parser.prepare_odata_filter(filter) + odata_data_filter, odata_data_params = self.odata_parser.prepare_odata_filter( + rls_filter + ) + result_by_query = await self.datasource.execute_dynamic_query( + query, + offset, + limit, + odata_filter, + odata_params, + odata_data_filter, + odata_data_params, + params, + ) + return result_by_query diff --git a/wavefront/server/plugins/datasource/datasource/bigquery/__init__.py b/wavefront/server/plugins/datasource/datasource/bigquery/__init__.py new file mode 100644 index 00000000..40497bca --- /dev/null +++ b/wavefront/server/plugins/datasource/datasource/bigquery/__init__.py @@ -0,0 +1,152 @@ +from typing import Any, Dict, List, Optional +from ..types import DataSourceABC +from .config import BigQueryConfig +from flo_cloud.gcp.bigquery import BigQueryClient +import asyncio + + +class 
BigQueryPlugin(DataSourceABC): + def __init__(self, config: BigQueryConfig): + self.config = config + self.client = BigQueryClient( + project_id=config.project_id, + location=config.location, + credentials_path=config.credentials_path, + credentials_json=config.credentials_json, + ) + self.table_prefix = f'{config.project_id}.{config.dataset_id}.' + + async def test_connection(self) -> bool: + return await self.client.test_connection() + + def get_schema(self, table_id: str) -> dict: + table_info = self.client.get_table_info(self.config.dataset_id, table_id) + return table_info['schema'] or {} + + def get_table_names(self, **kwargs) -> list[str]: + dataset_id = kwargs.get('dataset_id', self.config.dataset_id) + tables = self.client.list_tables(dataset_id) + return [table.table_id for table in tables] + + def fetch_data( + self, + table_names: List[str], + projection: str = '*', + where_clause: str = 'true', + join_query: Optional[str] = None, + params: Optional[Dict[str, Any]] = None, + offset: int = 0, + limit: int = 1000, + order_by: Optional[str] = None, + group_by: Optional[str] = None, + ) -> List[Dict[str, Any]]: + result = self.client.execute_query_to_dict( + projection=projection, + table_prefix=self.table_prefix, + table_names=table_names, + where_clause=where_clause, + join_query=join_query, + params=params, + limit=limit, + offset=offset, + order_by=order_by, + group_by=group_by, + ) + return result + + def insert_rows_json(self, table_name: str, data: List[Dict[str, Any]]): + result = self.client.insert_rows_json(f'{self.table_prefix}{table_name}', data) + return result + + async def execute_query( + self, query: str, use_legacy_sql: bool = False, dry_run: bool = False, **kwargs + ) -> Any: + # Set default dataset for unqualified table names using QueryJobConfig + dataset_path = self.table_prefix.rstrip('.') + kwargs['default_dataset'] = dataset_path + + result = await self.client.execute_query( + query, use_legacy_sql, dry_run, **kwargs + ) + return result + + async def execute_dynamic_query( + self, + queries: List[Dict[str, Any]], + offset: Optional[int] = 0, + limit: Optional[int] = 100, + odata_filter: Optional[str] = None, + odata_params: Optional[Dict[str, Any]] = None, + odata_data_filter: Optional[str] = None, + odata_data_params: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None, + ): + results = {} + tasks = [] + + for query_obj in queries: + query_to_execute = query_obj.get('query', '') + query_params = query_obj.get('parameters', {}) + query_id = query_obj.get('id') + if not query_id: + raise ValueError('Query ID is required') + + params_key = [params['name'] for params in query_params] + params_to_execute = dict() + + # Handle case when params is None + if params is None: + params = {} + + for key in params_key: + if key not in params: + raise ValueError(f'Missing parameter: {key} for query {query_id}') + params_to_execute[key] = params[key] + + if odata_params: + params_to_execute.update(odata_params) + if odata_data_params: + params_to_execute.update(odata_data_params) + + # Replace placeholders in the query + query_to_execute = query_to_execute.replace( + '{{rls}}', f'{odata_data_filter}' if odata_data_filter else 'TRUE' + ) + query_to_execute = query_to_execute.replace( + '{{filters}}', f'{odata_filter}' if odata_filter else 'TRUE' + ) + # adding limit and offset to the query + query_to_execute += f' LIMIT {limit} OFFSET {offset}' + + # Create async task for query execution + task = asyncio.create_task( + 
self.client.execute_query(query_to_execute, params=params_to_execute) + ) + tasks.append((query_obj['id'], task)) + + for query_id, task in tasks: + try: + # Await the async task to get the QueryJob + query_job = await task + + query_result = list(query_job.result()) + formatted_result = [dict(row.items()) for row in query_result] + + results[query_id] = { + 'status': 'success', + 'error': None, + 'description': f'Query {query_id} executed successfully', + 'result': formatted_result, + } + except Exception as e: + results[query_id] = { + 'status': 'error', + 'error': str(e), + 'description': f'Error executing query {query_id}', + 'result': [], + } + + return results + + +__all__ = ['BigQueryPlugin', 'BigQueryConfig'] diff --git a/wavefront/server/plugins/datasource/datasource/bigquery/config.py b/wavefront/server/plugins/datasource/datasource/bigquery/config.py new file mode 100644 index 00000000..d427ae23 --- /dev/null +++ b/wavefront/server/plugins/datasource/datasource/bigquery/config.py @@ -0,0 +1,11 @@ +from dataclasses import dataclass +from typing import Optional + + +@dataclass +class BigQueryConfig: + project_id: str + dataset_id: str + location: str + credentials_path: Optional[str] = None + credentials_json: Optional[dict] = None diff --git a/wavefront/server/plugins/datasource/datasource/helper.py b/wavefront/server/plugins/datasource/datasource/helper.py new file mode 100644 index 00000000..84d24b65 --- /dev/null +++ b/wavefront/server/plugins/datasource/datasource/helper.py @@ -0,0 +1,5 @@ +from .types import Meta + + +def construct_meta(status: str, message: str = '', code: int = 1) -> Meta: + return Meta(status=status, message=message, code=code) diff --git a/wavefront/server/plugins/datasource/datasource/odata_parser.py b/wavefront/server/plugins/datasource/datasource/odata_parser.py new file mode 100644 index 00000000..6a911ee9 --- /dev/null +++ b/wavefront/server/plugins/datasource/datasource/odata_parser.py @@ -0,0 +1,823 @@ +from datetime import datetime +from typing import Any, Tuple, List, Dict, Optional +from enum import Enum, auto +from abc import ABC, abstractmethod + + +class TokenType(Enum): + FIELD = auto() + OPERATOR = auto() + VALUE = auto() + LOGICAL_OP = auto() + LPAREN = auto() + RPAREN = auto() + EOF = auto() + DOLLAR = auto() + EQUALS = auto() + COMMA = auto() + AMPERSAND = auto() + + +class Token: + def __init__(self, type: TokenType, value: str, position: int): + self.type = type + self.value = value + self.position = position + + def __str__(self): + return f"Token({self.type}, '{self.value}', pos={self.position})" + + +class Lexer: + """Lexical analyzer for OData filter expressions""" + + def __init__(self, text: str): + self.text = text + self.position = 0 + self.current_char = self.text[0] if text else None + + def advance(self): + self.position += 1 + if self.position >= len(self.text): + self.current_char = None + else: + self.current_char = self.text[self.position] + + def peek(self, n: int = 1) -> Optional[str]: + peek_pos = self.position + n + if peek_pos >= len(self.text): + return None + return self.text[peek_pos] + + def skip_whitespace(self): + while self.current_char and self.current_char.isspace(): + self.advance() + + def read_identifier(self) -> str: + """Read field names and operators""" + result = '' + while self.current_char and ( + self.current_char.isalnum() or self.current_char in '._$' + ): + result += self.current_char + self.advance() + return result + + def read_string(self, quote_char: str) -> str: + """Read quoted string 
values""" + result = quote_char + self.advance() # consume opening quote + + while self.current_char and self.current_char != quote_char: + if self.current_char == '\\' and self.peek() == quote_char: + result += self.current_char + self.advance() + result += self.current_char + self.advance() + else: + result += self.current_char + self.advance() + + if self.current_char == quote_char: + result += self.current_char + self.advance() + + return result + + def read_array(self) -> str: + """Read array values like [1,2,3]""" + result = '[' + self.advance() # consume opening bracket + + while self.current_char and self.current_char != ']': + result += self.current_char + self.advance() + + if self.current_char == ']': + result += self.current_char + self.advance() + + return result + + def read_number_or_identifier(self) -> str: + """Read numbers or unquoted identifiers including datetime strings""" + result = '' + while self.current_char and ( + self.current_char.isalnum() or self.current_char in '._-:T' + ): + result += self.current_char + self.advance() + return result + + def get_next_token(self) -> Token: + while self.current_char: + if self.current_char.isspace(): + self.skip_whitespace() + continue + + if self.current_char == '(': + token = Token(TokenType.LPAREN, self.current_char, self.position) + self.advance() + return token + + if self.current_char == ')': + token = Token(TokenType.RPAREN, self.current_char, self.position) + self.advance() + return token + + if self.current_char == '$': + # Check if it's part of a logical operator like $and, $or + if self.peek() and self.peek().isalpha(): + # Read the full identifier including the $ + identifier = self.current_char + self.advance() + while self.current_char and ( + self.current_char.isalnum() or self.current_char in '_' + ): + identifier += self.current_char + self.advance() + + # Check if it's a logical operator + if identifier in ['$and', '$or']: + return Token( + TokenType.LOGICAL_OP, + identifier, + self.position - len(identifier), + ) + else: + # For OData query parameters like $expand, $join, etc. 
+                        return Token(
+                            TokenType.DOLLAR,
+                            identifier,
+                            self.position - len(identifier),
+                        )
+            else:
+                # Single $ character
+                token = Token(TokenType.DOLLAR, self.current_char, self.position)
+                self.advance()
+                return token
+
+            if self.current_char == '=':
+                token = Token(TokenType.EQUALS, self.current_char, self.position)
+                self.advance()
+                return token
+
+            if self.current_char == ',':
+                token = Token(TokenType.COMMA, self.current_char, self.position)
+                self.advance()
+                return token
+
+            if self.current_char == '&':
+                token = Token(TokenType.AMPERSAND, self.current_char, self.position)
+                self.advance()
+                return token
+
+            if self.current_char in '\'"':
+                value = self.read_string(self.current_char)
+                return Token(TokenType.VALUE, value, self.position - len(value))
+
+            if self.current_char == '[':
+                value = self.read_array()
+                return Token(TokenType.VALUE, value, self.position - len(value))
+
+            if self.current_char.isalpha() or self.current_char == '_':
+                identifier = self.read_identifier()
+
+                # Check if it's a logical operator
+                if identifier in ['$and', '$or', 'AND', 'OR']:
+                    return Token(
+                        TokenType.LOGICAL_OP,
+                        identifier,
+                        self.position - len(identifier),
+                    )
+
+                # Check if it's an operator
+                operators = ['eq', 'gt', 'lt', 'lte', 'gte', 'contains', 'in']
+                if identifier in operators:
+                    return Token(
+                        TokenType.OPERATOR, identifier, self.position - len(identifier)
+                    )
+
+                # Must be a field name
+                return Token(
+                    TokenType.FIELD, identifier, self.position - len(identifier)
+                )
+
+            if self.current_char.isdigit() or self.current_char in '._-:T':
+                value = self.read_number_or_identifier()
+                return Token(TokenType.VALUE, value, self.position - len(value))
+
+            raise ValueError(
+                f'Unexpected character: {self.current_char} at position {self.position}'
+            )
+
+        return Token(TokenType.EOF, '', self.position)
+
+
+class ODataParserABC(ABC):
+    @abstractmethod
+    def prepare_odata_filter(
+        self, filter_expr: str
+    ) -> Tuple[Optional[str], Optional[Dict[str, Any]]]:
+        pass
+
+    @abstractmethod
+    def prepare_odata_joins(
+        self, odata_query: str, parent_table: str
+    ) -> Tuple[str, List[str], str, Dict[str, Any]]:
+        pass
+
+
+class ODataQueryParser(ODataParserABC):
+    """Unified parser for OData query parameters"""
+
+    def __init__(self, type: str, dynamic_var_char: str = '@'):
+        self.type = type
+        self.dynamic_var_char = dynamic_var_char
+
+        if self.type == 'sql':
+            self.parser = SQLODataParser(self.dynamic_var_char)
+        else:
+            raise ValueError(f'Invalid type: {self.type}')
+
+    def prepare_odata_filter(
+        self, odata_filter: str
+    ) -> Tuple[Optional[str], Optional[Dict[str, Any]]]:
+        return self.parser.prepare_odata_filter(odata_filter)
+
+    def prepare_odata_joins(
+        self, odata_joins: str, parent_table: str
+    ) -> Tuple[str, List[str], str, Dict[str, Any]]:
+        return self.parser.prepare_odata_joins(odata_joins, parent_table)
+
+
+class SQLFilterParser:
+    """Unified parser for OData filter expressions and query parameters"""
+
+    def __init__(self, lexer: Lexer, dynamic_var_char: str = '@'):
+        self.lexer = lexer
+        self.current_token = self.lexer.get_next_token()
+        self.params = {}
+        self.param_count = {}
+        self.dynamic_var_char = dynamic_var_char
+
+    def eat(self, token_type: TokenType):
+        if self.current_token.type == token_type:
+            self.current_token = self.lexer.get_next_token()
+        else:
+            raise ValueError(f'Expected {token_type}, got {self.current_token.type}')
+
+    def parse_filter_expression(self) -> str:
+        """Parse a complete filter expression"""
+        if self.current_token.type == TokenType.EOF:
+            return ''
+
+        return self.parse_logical_expression()
+
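+    # Illustrative sketch (mirroring tests/test_odata_parser_lex.py): a filter
+    # expression is tokenised by the Lexer and folded into a parameterised SQL
+    # fragment, with the bound values collected on `self.params`:
+    #
+    #   lexer = Lexer("name eq 'John' $and age gt 25")
+    #   parser = SQLFilterParser(lexer)
+    #   parser.parse_filter_expression()  # -> "name = @name AND age > @age"
+    #   parser.params                     # -> {'name': 'John', 'age': 25}
+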
def parse_logical_expression(self) -> str: + """Parse AND/OR expressions""" + left = self.parse_comparison_expression() + + while self.current_token.type == TokenType.LOGICAL_OP: + op = self.current_token.value + self.eat(TokenType.LOGICAL_OP) + right = self.parse_comparison_expression() + + # Convert OData logical operators to SQL + sql_op = 'AND' if op in ['$and', 'AND'] else 'OR' + left = f'{left} {sql_op} {right}' + + return left + + def parse_comparison_expression(self) -> str: + """Parse comparison expressions like field eq value""" + if self.current_token.type == TokenType.LPAREN: + self.eat(TokenType.LPAREN) + expr = self.parse_logical_expression() + self.eat(TokenType.RPAREN) + return f'({expr})' + + if self.current_token.type != TokenType.FIELD: + raise ValueError(f'Expected field name, got {self.current_token.type}') + + field = self.current_token.value + self.eat(TokenType.FIELD) + + if self.current_token.type != TokenType.OPERATOR: + raise ValueError(f'Expected operator, got {self.current_token.type}') + + operator = self.current_token.value + self.eat(TokenType.OPERATOR) + + if self.current_token.type != TokenType.VALUE: + raise ValueError(f'Expected value, got {self.current_token.type}') + + value = self.current_token.value + self.eat(TokenType.VALUE) + + return self.build_comparison(field, operator, value) + + def build_comparison(self, field: str, operator: str, value: str) -> str: + """Build SQL comparison expression with parameter binding""" + ops = { + 'eq': '=', + 'gt': '>', + 'lt': '<', + 'lte': '<=', + 'gte': '>=', + 'contains': 'LIKE', + 'in': 'IN', + } + + if operator not in ops: + raise ValueError(f'Unsupported operator: {operator}') + + # Generate parameter key - handle table aliases (e.g., "a.id" -> "a_id") + if '.' in field: + # For table aliases like "a.id", use "a_id" as parameter key + table_alias, column_name = field.split('.', 1) + base_param_key = f'{table_alias}_{column_name}' + else: + base_param_key = field + + if base_param_key in self.param_count: + self.param_count[base_param_key] += 1 + param_key = f'{base_param_key}_{self.param_count[base_param_key]}' + else: + self.param_count[base_param_key] = 0 + param_key = base_param_key + + sql_op = ops[operator] + + if operator == 'contains': + parsed_value = self.parse_value(value) + self.params[param_key] = f'%{parsed_value}%' + return f'{field} {sql_op} {self.dynamic_var_char}{param_key}' + + elif operator == 'in': + # Parse array values + items = value.strip('[]').split(',') + parsed_values = [v.strip().strip('\'"') for v in items] + + placeholder_keys = [] + for idx, val in enumerate(parsed_values): + item_key = f'{param_key}_{idx}' + self.params[item_key] = val + placeholder_keys.append(f'{self.dynamic_var_char}{item_key}') + + return f"{field} IN ({', '.join(placeholder_keys)})" + + else: + parsed_value = self.parse_value(value) + self.params[param_key] = parsed_value + return f'{field} {sql_op} {self.dynamic_var_char}{param_key}' + + def parse_value(self, value: str) -> Any: + """Parse value to appropriate Python type""" + # Remove quotes if present + was_quoted = False + if (value.startswith("'") and value.endswith("'")) or ( + value.startswith('"') and value.endswith('"') + ): + value = value[1:-1] + was_quoted = True + + # If it was quoted, treat as string unless it's a datetime + if was_quoted: + # Try to parse as datetime first + try: + return datetime.fromisoformat(value) + except ValueError: + pass + return value + + # Try to parse as number + if value.isdigit(): + return int(value) + + # Try 
to parse as float + try: + return float(value) + except ValueError: + pass + + # Try to parse as datetime + try: + return datetime.fromisoformat(value) + except ValueError: + pass + + return value + + def parse_odata_query(self) -> Dict[str, Any]: + """Parse OData query string and extract parameters""" + if self.current_token.type == TokenType.EOF: + return {} + + result = {} + + while self.current_token.type != TokenType.EOF: + if self.current_token.type == TokenType.AMPERSAND: + self.eat(TokenType.AMPERSAND) + continue + + if self.current_token.type == TokenType.DOLLAR: + # The DOLLAR token contains the full parameter name (e.g., '$expand', '$join') + param_name = self.current_token.value[1:] # Remove the '$' prefix + self.eat(TokenType.DOLLAR) + + if self.current_token.type == TokenType.EQUALS: + self.eat(TokenType.EQUALS) + param_value = self.parse_parameter_value() + + if param_name == 'expand': + expand_tables = self.parse_expand_value(param_value) + if expand_tables: + result['expand'] = expand_tables + # Also add the old format for backward compatibility + result['expand_tables'] = [ + table['name'] for table in expand_tables + ] + elif param_name == 'join': + join_columns = self.parse_join_value(param_value) + if join_columns: + result['join'] = join_columns + elif param_name in ['filter', 'select', 'orderby', 'top', 'skip']: + # Skip other parameters for now + pass + else: + raise ValueError( + f"Expected '=' after parameter name, got {self.current_token.type}" + ) + else: + # Skip unknown tokens + self.current_token = self.lexer.get_next_token() + + return result + + def parse_parameter_value(self) -> str: + """Parse parameter value until next parameter or end""" + value_parts = [] + paren_depth = 0 + + while self.current_token.type != TokenType.EOF: + if self.current_token.type == TokenType.AMPERSAND and paren_depth == 0: + # Only stop at & if we're not inside parentheses + break + elif self.current_token.type == TokenType.DOLLAR and paren_depth == 0: + # Only stop at $ if we're not inside parentheses + break + elif self.current_token.type == TokenType.LPAREN: + paren_depth += 1 + value_parts.append(self.current_token.value) + elif self.current_token.type == TokenType.RPAREN: + paren_depth -= 1 + value_parts.append(self.current_token.value) + elif self.current_token.type in [ + TokenType.VALUE, + TokenType.FIELD, + TokenType.COMMA, + TokenType.OPERATOR, + TokenType.EQUALS, + ]: + value_parts.append(self.current_token.value) + elif self.current_token.type == TokenType.DOLLAR: + # When inside parentheses, include $ tokens (like $filter=) + value_parts.append(self.current_token.value) + else: + # Skip other tokens + pass + + self.current_token = self.lexer.get_next_token() + + return ''.join(value_parts) + + def parse_expand_value(self, expand_value: str) -> List[Dict[str, Any]]: + """Parse $expand parameter to extract table names and their filters""" + tables = [] + + # Split by comma, but be careful about commas inside parentheses + parts = self._split_expand_parts(expand_value) + + for part in parts: + # Extract the main table name (before any parentheses) + table_name = part.split('(')[0].strip() + if not table_name: + continue + + table_info = {'name': table_name, 'filters': []} + + # Add the main table first + tables.append(table_info) + + # Check for nested expands and filters within parentheses + if '(' in part and ')' in part: + # Extract content within parentheses + nested_start = part.find('(') + 1 + nested_end = part.rfind(')') + if nested_start < nested_end: + 
nested_content = part[nested_start:nested_end] + + # Parse nested content for filters on the main table + # Only process filters if they are directly on this table (not nested) + if ( + '$filter=' in nested_content + and '$expand=' not in nested_content + ): + filter_start = ( + nested_content.find('$filter=') + 8 + ) # Remove '$filter=' prefix + filter_end = nested_content.find('&', filter_start) + if filter_end == -1: + filter_end = len(nested_content) + filter_expr = nested_content[filter_start:filter_end].strip() + if filter_expr: + # Fix the filter expression by adding spaces between field, operator, and value + fixed_filter_expr = self._fix_filter_expression(filter_expr) + table_info['filters'].append(fixed_filter_expr) + + # Parse nested expand content - remove $expand= prefix + if '$expand=' in nested_content: + expand_start = ( + nested_content.find('$expand=') + 8 + ) # Remove '$expand=' prefix + expand_end = nested_content.find('&', expand_start) + if expand_end == -1: + expand_end = len(nested_content) + nested_expand_content = nested_content[expand_start:expand_end] + # Recursively parse nested content + nested_tables = self._split_expand_parts(nested_expand_content) + for nested_table in nested_tables: + nested_table = nested_table.strip() + if nested_table and not nested_table.startswith('$'): + # Extract clean table name from nested table (remove any filter expressions) + clean_nested_table = nested_table.split('(')[0].strip() + nested_info = { + 'name': clean_nested_table, + 'filters': [], + } + + # Check if this nested table has filters + if '(' in nested_table and ')' in nested_table: + nested_table_start = nested_table.find('(') + 1 + nested_table_end = nested_table.rfind(')') + if nested_table_start < nested_table_end: + nested_table_content = nested_table[ + nested_table_start:nested_table_end + ] + if '$filter=' in nested_table_content: + filter_start = ( + nested_table_content.find('$filter=') + + 8 + ) + filter_end = nested_table_content.find( + '&', filter_start + ) + if filter_end == -1: + filter_end = len(nested_table_content) + filter_expr = nested_table_content[ + filter_start:filter_end + ].strip() + if filter_expr: + fixed_filter_expr = ( + self._fix_filter_expression( + filter_expr + ) + ) + nested_info['filters'].append( + fixed_filter_expr + ) + + # Add nested tables after the main table + tables.append(nested_info) + + return tables + + def _fix_filter_expression(self, filter_expr: str) -> str: + """Fix filter expression by adding spaces between field, operator, and value""" + # Common OData operators + operators = ['eq', 'gt', 'lt', 'lte', 'gte', 'contains', 'in', 'ne'] + + for operator in operators: + if operator in filter_expr: + # Find the operator and add spaces around it + op_index = filter_expr.find(operator) + if op_index > 0: + # Check if there's already a space before the operator + if not filter_expr[op_index - 1].isspace(): + filter_expr = ( + filter_expr[:op_index] + ' ' + filter_expr[op_index:] + ) + op_index += 1 # Adjust index after adding space + # Check if there's already a space after the operator + if ( + op_index + len(operator) < len(filter_expr) + and not filter_expr[op_index + len(operator)].isspace() + ): + filter_expr = ( + filter_expr[: op_index + len(operator)] + + ' ' + + filter_expr[op_index + len(operator) :] + ) + break + + return filter_expr + + def get_expand_table_names(self, expand_value: str) -> List[str]: + """Get just the table names for backward compatibility""" + tables = self.parse_expand_value(expand_value) + 
return [table['name'] for table in tables]
+
+    def parse_join_value(self, join_value: str) -> List[str]:
+        """Parse $join parameter to extract column names"""
+        if not join_value:
+            return []
+
+        # Split by comma and clean up
+        columns = [col.strip() for col in join_value.split(',') if col.strip()]
+        return columns
+
+    def _split_expand_parts(self, expand_value: str) -> List[str]:
+        """Split expand value by comma, but respect parentheses"""
+        parts = []
+        current_part = ''
+        paren_depth = 0
+
+        for char in expand_value:
+            if char == '(':
+                paren_depth += 1
+                current_part += char
+            elif char == ')':
+                paren_depth -= 1
+                current_part += char
+            elif char == ',' and paren_depth == 0:
+                # Only split on comma if we're not inside parentheses
+                parts.append(current_part.strip())
+                current_part = ''
+            else:
+                current_part += char
+
+        # Add the last part
+        if current_part.strip():
+            parts.append(current_part.strip())
+
+        return parts
+
+
+class SQLODataParser(ODataParserABC):
+    def __init__(self, dynamic_var_char: str = '@'):
+        self.dynamic_var_char = dynamic_var_char
+
+    def prepare_odata_filter(
+        self, filter_expr: str
+    ) -> Tuple[Optional[str], Optional[Dict[str, Any]]]:
+        """Parses an OData-like filter expression and converts it into a SQL-like query with parameters."""
+        if not filter_expr:
+            return None, None
+
+        lexer = Lexer(filter_expr)
+        parser = SQLFilterParser(lexer, self.dynamic_var_char)
+
+        sql_expr = parser.parse_filter_expression()
+
+        if not sql_expr:
+            raise ValueError('Invalid filter expression')
+
+        return sql_expr, parser.params
+
+    def prepare_odata_joins(
+        self, odata_query: str, parent_table: str
+    ) -> Tuple[str, List[str], str, Dict[str, Any]]:
+        """Parses OData query parameters including $expand and $join and generates SQL JOIN statements."""
+        if not odata_query:
+            return '', [], '', {}
+
+        lexer = Lexer(odata_query)
+        query_parser = SQLFilterParser(lexer, self.dynamic_var_char)
+
+        query_params = query_parser.parse_odata_query()
+
+        expand_tables = query_params.get('expand', [])
+        join_columns = query_params.get('join', [])
+
+        join_builder = JoinBuilder(self.dynamic_var_char)
+        join_sql, table_aliases, where_clause, filter_params = join_builder.build_joins(
+            expand_tables, join_columns, parent_table
+        )
+
+        return join_sql, table_aliases, where_clause, filter_params
+
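+
+# Illustrative sketch (mirroring tests/test_join_operations.py): $expand/$join
+# parameters are turned into JOIN SQL plus a WHERE fragment for any per-table
+# filters; the table and column names below are examples only:
+#
+#   parser = ODataQueryParser(type='sql')
+#   join_sql, aliases, where, params = parser.prepare_odata_joins(
+#       "$expand=orders($filter=id eq 'test')&$join=customer_id", 'customers'
+#   )
+#   # join_sql -> "JOIN orders\n    ON customers.customer_id = orders.customer_id"
+#   # aliases  -> ['orders']
+#   # where    -> "orders.id = @orders_id_"
+#   # params   -> {'orders_id_': 'test'}
+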
+
+class JoinBuilder:
+    """Builds SQL JOIN statements from OData expand and join parameters"""
+
+    def __init__(self, dynamic_var_char: str = '@'):
+        self.dynamic_var_char = dynamic_var_char
+
+    def build_joins(
+        self,
+        expand_tables: List[Dict[str, Any]],
+        join_columns: List[str],
+        parent_table: str,
+    ) -> Tuple[str, List[str], str, Dict[str, Any]]:
+        """
+        Build SQL JOIN statements with filters
+
+        Args:
+            expand_tables: List of table info dicts with names and filters
+            join_columns: List of join columns (e.g., ['customer_id', 'order_id'])
+            parent_table: The parent table name
+
+        Returns:
+            Tuple of (join_sql, table_aliases, where_clause, filter_params)
+        """
+        if not expand_tables:
+            return '', [], '', {}
+
+        join_statements = []
+        table_aliases = []
+        where_clauses = []
+        all_filter_params = {}
+
+        # Extract table names for backward compatibility
+        table_names = [
+            table['name'] if isinstance(table, dict) else table
+            for table in expand_tables
+        ]
+
+        # If no join columns provided, use 'id' for all joins
+        if not join_columns:
+            join_columns = ['id'] * len(table_names)
+
+        # If only one column is provided, use it for all joins
+        if len(join_columns) == 1:
+            join_columns = join_columns * len(table_names)
+
+        # Ensure we have enough columns for all tables
+        while len(join_columns) < len(table_names):
+            join_columns.append(join_columns[-1] if join_columns else 'id')
+
+        # Build joins
+        for i, table_info in enumerate(expand_tables):
+            if isinstance(table_info, dict):
+                table = table_info['name']
+                filters = table_info.get('filters', [])
+            else:
+                # Backward compatibility for string table names
+                table = table_info
+                filters = []
+
+            # Determine the join columns for this table
+            # Special case: exactly 2 join columns with a single table means the
+            # parent and child join on differently named columns
+            if len(table_names) == 1 and len(join_columns) == 2:
+                parent_column = join_columns[0]
+                table_column = join_columns[1]
+            else:
+                # General case: use the same column name on both sides
+                if i < len(join_columns):
+                    join_column = join_columns[i]
+                else:
+                    # Use the last column if not enough provided
+                    join_column = join_columns[-1] if join_columns else 'id'
+                parent_column = join_column
+                table_column = join_column
+
+            # Build the JOIN statement
+            if i == 0:
+                # First join: parent_table -> table
+                join_stmt = f'JOIN {table}\n    ON {parent_table}.{parent_column} = {table}.{table_column}'
+            else:
+                # Subsequent joins: previous_table -> current_table
+                prev_table = table_names[i - 1]
+                join_stmt = f'JOIN {table}\n    ON {prev_table}.{parent_column} = {table}.{table_column}'
+
+            join_statements.append(join_stmt)
+            table_aliases.append(table)
+
+            # Process filters for this table
+            for filter_expr in filters:
+                if filter_expr:
+                    # Parse the filter expression to get SQL and parameters
+                    try:
+                        # Use a fresh parser instance so parameter state does not
+                        # leak between filters
+                        filter_parser = SQLODataParser(self.dynamic_var_char)
+                        sql_filter, filter_params = filter_parser.prepare_odata_filter(
+                            filter_expr
+                        )
+                        if sql_filter:
+                            # Prefix the filter and its parameters with the table
+                            # name to avoid ambiguity; use the configured
+                            # placeholder character rather than a hardcoded '@'
+                            for key in filter_params.keys():
+                                sql_filter = sql_filter.replace(
+                                    f'{self.dynamic_var_char}{key}',
+                                    f'{self.dynamic_var_char}{table}_{key}_',
+                                )
+                            where_clauses.append(f'{table}.{sql_filter}')
+                            # Update parameter names to avoid conflicts
+                            for key, value in filter_params.items():
+                                new_key = f'{table}_{key}_'
+                                all_filter_params[new_key] = value
+                    except Exception as e:
+                        # If filter parsing fails, log the error and skip it
+                        print(f"Filter parsing failed for '{filter_expr}': {e}")
+
+        join_sql = '\n'.join(join_statements)
+        where_clause = ' AND '.join(where_clauses) if where_clauses else ''
+
+        return join_sql, table_aliases, where_clause, all_filter_params
diff --git a/wavefront/server/plugins/datasource/datasource/redshift/__init__.py b/wavefront/server/plugins/datasource/datasource/redshift/__init__.py
new file mode 100644
index 00000000..3b26ae26
--- /dev/null
+++ b/wavefront/server/plugins/datasource/datasource/redshift/__init__.py
@@ -0,0 +1,71 @@
+from typing import Any, Dict, List, Optional
+
+from ..types import DataSourceABC
+from flo_cloud.aws.redshift import RedshiftClient as AWSRedshiftClient
+from .config import RedshiftConfig
+
+
+class RedshiftPlugin(DataSourceABC):
+    def __init__(self, config: RedshiftConfig):
+        self.config = config
+        self.client = AWSRedshiftClient(
+            host=config.host,
+            port=config.port,
+            database=config.database,
+            user=config.user,
+            password=config.password,
+        )
+        self.db_name = f'{config.database}.public'
+
+    # Declared async to satisfy DataSourceABC and the awaiting caller in
+    # DatasourcePlugin.test_connection; the underlying client call is synchronous.
+    async def test_connection(self) -> bool:
+        return self.client.test_connection()
+
+    def get_schema(self) -> dict:
+        return self.client.get_table_info()
+
+    def get_table_names(self, **kwargs) -> list[str]:
+        return
self.client.list_tables() + + def fetch_data( + self, + table_name: str, + projection: Optional[str] = None, + where_clause: Optional[str] = None, + params: Optional[Dict[str, Any]] = None, + offset: Optional[int] = None, + limit: Optional[int] = None, + order_by: Optional[str] = None, + group_by: Optional[str] = None, + ) -> List[Dict[str, Any]]: + result = self.client.execute_query_to_dict( + projection=projection, + table_name=f'{self.db_name}.{table_name}', + where_clause=where_clause, + params=params, + limit=limit, + offset=offset, + order_by=order_by, + group_by=group_by, + ) + return result + + def insert_rows_json(self, table_name: str, data): + pass + + def execute_dynamic_query( + self, + query: List[Dict[str, Any]], + odata_filter: Optional[str] = None, + odata_params: Optional[Dict[str, Any]] = None, + odata_data_filter: Optional[str] = None, + odata_data_params: Optional[Dict[str, Any]] = None, + offset: Optional[int] = 0, + limit: Optional[int] = 100, + params: Optional[Dict[str, Any]] = None, + ): + # TODO: Implement RLS filter support for Redshift + # For now, just execute the query without RLS filter + pass + + +__all__ = ['RedshiftPlugin', 'RedshiftConfig'] diff --git a/wavefront/server/plugins/datasource/datasource/redshift/config.py b/wavefront/server/plugins/datasource/datasource/redshift/config.py new file mode 100644 index 00000000..51321c0f --- /dev/null +++ b/wavefront/server/plugins/datasource/datasource/redshift/config.py @@ -0,0 +1,10 @@ +from dataclasses import dataclass + + +@dataclass +class RedshiftConfig: + host: str + port: int + database: str + user: str + password: str diff --git a/wavefront/server/plugins/datasource/datasource/types.py b/wavefront/server/plugins/datasource/datasource/types.py new file mode 100644 index 00000000..074b6056 --- /dev/null +++ b/wavefront/server/plugins/datasource/datasource/types.py @@ -0,0 +1,101 @@ +from enum import Enum +from abc import ABC, abstractmethod +from typing import Any, Generic, TypeVar, List, Dict, Optional +from dataclasses import dataclass + + +@dataclass +class Meta: + status: str + message: str + code: int + + +T = TypeVar('T') + + +@dataclass +class DataSourceResult(Generic[T]): + meta: Meta + result: T + + +BooleanResult = DataSourceResult[bool] +SchemaResult = DataSourceResult[Dict[str, Any]] +StringResult = DataSourceResult[str] +TableListResult = DataSourceResult[List[str]] +QueryResult = DataSourceResult[List[Dict[str, Any]]] + + +class DataSourceType(str, Enum): + AWS_RDS = 'aws_rds' + AWS_S3 = 'aws_s3' + AWS_REDSHIFT = 'aws_redshift' + AZURE_BLOB_STORAGE = 'azure_blob_storage' + AZURE_DATA_LAKE = 'azure_data_lake' + AZURE_SQL_DATABASE = 'azure_sql_database' + AZURE_SQL_DATABASE_V2 = 'azure_sql_database_v2' + AZURE_SQL_DATA_WAREHOUSE = 'azure_sql_data_warehouse' + AZURE_SQL_DATA_WAREHOUSE_V2 = 'azure_sql_data_warehouse_v2' + AZURE_SYNAPSE = 'azure_synapse' + GCS = 'gcs' + GCP_BIGQUERY = 'gcp_bigquery' + MONGODB = 'mongodb' + MYSQL = 'mysql' + ORACLE = 'oracle' + POSTGRES = 'postgres' + REDIS = 'redis' + SNOWFLAKE = 'snowflake' + SQLITE = 'sqlite' + + +class DataSourceABC(ABC): + @abstractmethod + async def test_connection(self) -> bool: + pass + + @abstractmethod + def get_schema(self) -> dict: + pass + + @abstractmethod + def get_table_names(self, **kwargs) -> list[str]: + pass + + @abstractmethod + def fetch_data( + self, + table_names: List[str], + projection: Optional[str] = '*', + where_clause: Optional[str] = 'true', + join_query: Optional[str] = None, + params: Optional[Dict[str, Any]] = 
None, + offset: Optional[int] = 0, + limit: Optional[int] = 10, + order_by: Optional[str] = None, + group_by: Optional[str] = None, + ) -> List[Dict[str, Any]]: + pass + + @abstractmethod + def insert_rows_json(self, table_name: str, data: List[Dict[str, Any]]) -> None: + pass + + @abstractmethod + async def execute_dynamic_query( + self, + query: List[Dict[str, Any]], + odata_filter: Optional[str] = None, + odata_params: Optional[Dict[str, Any]] = None, + odata_data_filter: Optional[str] = None, + odata_data_params: Optional[Dict[str, Any]] = None, + offset: Optional[int] = 0, + limit: Optional[int] = 100, + ): + pass + + @abstractmethod + async def execute_query( + self, query: str, use_legacy_sql: bool = False, dry_run: bool = False, **kwargs + ) -> Any: + pass diff --git a/wavefront/server/plugins/datasource/pyproject.toml b/wavefront/server/plugins/datasource/pyproject.toml new file mode 100644 index 00000000..05822a73 --- /dev/null +++ b/wavefront/server/plugins/datasource/pyproject.toml @@ -0,0 +1,27 @@ +[project] +name = "datasource" +version = "0.1.0" +description = "Add your description here" +readme = "README.md" +requires-python = ">=3.11" + +dependencies = [ + "flo-cloud", +] + +[tool.uv.sources] +flo-cloud = { workspace = true } + +[tool.pytest.ini_options] +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" + +[tool.uv] +package = true + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["datasource"] diff --git a/wavefront/server/plugins/datasource/tests/test_join_operations.py b/wavefront/server/plugins/datasource/tests/test_join_operations.py new file mode 100644 index 00000000..c9d136c2 --- /dev/null +++ b/wavefront/server/plugins/datasource/tests/test_join_operations.py @@ -0,0 +1,613 @@ +#!/usr/bin/env python3 +""" +Test script for join operations in the OData parser +""" + +import pytest +from datasource.odata_parser import ( + JoinBuilder, + ODataQueryParser, + Lexer, + SQLFilterParser, +) +import os + +# Set cloud provider for testing +os.environ['CLOUD_PROVIDER'] = 'gcp' + +parser = ODataQueryParser(type='sql') + + +class TestODataQueryParser: + """Test cases for ODataQueryParser class""" + + def test_parse_empty_query(self): + """Test parsing empty query string""" + lexer = Lexer('') + query_parser = SQLFilterParser(lexer) + result = query_parser.parse_odata_query() + assert result == {} + + def test_parse_none_query(self): + """Test parsing None query string""" + lexer = Lexer('') + query_parser = SQLFilterParser(lexer) + result = query_parser.parse_odata_query() + assert result == {} + + def test_parse_simple_expand(self): + """Test parsing simple $expand parameter""" + lexer = Lexer('$expand=orders') + query_parser = SQLFilterParser(lexer) + result = query_parser.parse_odata_query() + assert result['expand_tables'] == ['orders'] + assert 'expand' in result + + def test_parse_multiple_expand(self): + """Test parsing multiple tables in $expand""" + lexer = Lexer('$expand=orders,payments') + query_parser = SQLFilterParser(lexer) + result = query_parser.parse_odata_query() + assert result['expand_tables'] == ['orders', 'payments'] + + def test_parse_nested_expand(self): + """Test parsing nested expand expressions""" + lexer = Lexer('$expand=orders($expand=payments)') + query_parser = SQLFilterParser(lexer) + result = query_parser.parse_odata_query() + assert result['expand_tables'] == ['orders', 'payments'] + + def test_parse_complex_nested_expand(self): + """Test parsing 
complex nested expand expressions""" + lexer = Lexer('$expand=orders($expand=payments,items),customers') + query_parser = SQLFilterParser(lexer) + result = query_parser.parse_odata_query() + assert result['expand_tables'] == ['orders', 'payments', 'items', 'customers'] + + def test_parse_join_parameter(self): + """Test parsing $join parameter""" + lexer = Lexer('$join=customer_id') + query_parser = SQLFilterParser(lexer) + result = query_parser.parse_odata_query() + assert result['join'] == ['customer_id'] + assert 'expand' not in result + + def test_parse_multiple_join_columns(self): + """Test parsing multiple join columns""" + lexer = Lexer('$join=customer_id,order_id,payment_id') + query_parser = SQLFilterParser(lexer) + result = query_parser.parse_odata_query() + assert result['join'] == ['customer_id', 'order_id', 'payment_id'] + + def test_parse_expand_and_join_together(self): + """Test parsing both $expand and $join parameters""" + lexer = Lexer('$expand=orders,payments&$join=customer_id,order_id') + query_parser = SQLFilterParser(lexer) + result = query_parser.parse_odata_query() + assert result['expand_tables'] == ['orders', 'payments'] + assert result['join'] == ['customer_id', 'order_id'] + + def test_parse_join_with_whitespace(self): + """Test parsing join columns with whitespace""" + lexer = Lexer('$join= customer_id , order_id ') + query_parser = SQLFilterParser(lexer) + result = query_parser.parse_odata_query() + assert result['join'] == ['customer_id', 'order_id'] + + def test_parse_empty_join(self): + """Test parsing empty join parameter""" + lexer = Lexer('$join=') + query_parser = SQLFilterParser(lexer) + result = query_parser.parse_odata_query() + assert result == {} + + +class TestJoinBuilder: + """Test cases for JoinBuilder class""" + + def test_build_joins_empty_tables(self): + """Test building joins with empty table list""" + builder = JoinBuilder() + join_sql, table_aliases, where_clause, filter_params = builder.build_joins( + [], [], 'customers' + ) + assert join_sql == '' + assert table_aliases == [] + assert where_clause == '' + assert filter_params == {} + + def test_build_single_join(self): + """Test building a single join""" + builder = JoinBuilder() + join_sql, table_aliases, where_clause, filter_params = builder.build_joins( + [{'name': 'orders', 'filters': []}], ['customer_id'], 'customers' + ) + expected_sql = 'JOIN orders\n ON customers.customer_id = orders.customer_id' + assert join_sql == expected_sql + assert table_aliases == ['orders'] + assert where_clause == '' + assert filter_params == {} + + def test_build_multiple_joins(self): + """Test building multiple joins""" + builder = JoinBuilder() + join_sql, table_aliases, where_clause, filter_params = builder.build_joins( + [{'name': 'orders', 'filters': []}, {'name': 'payments', 'filters': []}], + ['customer_id', 'order_id'], + 'customers', + ) + expected_sql = ( + 'JOIN orders\n' + ' ON customers.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.order_id = payments.order_id' + ) + assert join_sql == expected_sql + assert table_aliases == ['orders', 'payments'] + assert where_clause == '' + assert filter_params == {} + + def test_build_joins_single_column(self): + """Test building joins with single column for all tables""" + builder = JoinBuilder() + join_sql, table_aliases, where_clause, filter_params = builder.build_joins( + [{'name': 'orders', 'filters': []}, {'name': 'payments', 'filters': []}], + ['customer_id'], + 'customers', + ) + expected_sql = ( + 'JOIN orders\n' + ' ON 
customers.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.customer_id = payments.customer_id' + ) + assert join_sql == expected_sql + assert table_aliases == ['orders', 'payments'] + assert where_clause == '' + assert filter_params == {} + + def test_build_joins_insufficient_columns(self): + """Test building joins with fewer columns than tables""" + builder = JoinBuilder() + join_sql, table_aliases, where_clause, filter_params = builder.build_joins( + [ + {'name': 'orders', 'filters': []}, + {'name': 'payments', 'filters': []}, + {'name': 'items', 'filters': []}, + ], + ['customer_id'], + 'customers', + ) + expected_sql = ( + 'JOIN orders\n' + ' ON customers.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.customer_id = payments.customer_id\n' + 'JOIN items\n' + ' ON payments.customer_id = items.customer_id' + ) + assert join_sql == expected_sql + assert table_aliases == ['orders', 'payments', 'items'] + assert where_clause == '' + assert filter_params == {} + + def test_build_joins_no_columns(self): + """Test building joins with no columns provided""" + builder = JoinBuilder() + join_sql, table_aliases, where_clause, filter_params = builder.build_joins( + [{'name': 'orders', 'filters': []}, {'name': 'payments', 'filters': []}], + [], + 'customers', + ) + expected_sql = ( + 'JOIN orders\n' + ' ON customers.id = orders.id\n' + 'JOIN payments\n' + ' ON orders.id = payments.id' + ) + assert join_sql == expected_sql + assert table_aliases == ['orders', 'payments'] + assert where_clause == '' + assert filter_params == {} + + def test_build_joins_different_columns(self): + """Test building joins with different columns for each table""" + builder = JoinBuilder() + join_sql, table_aliases, where_clause, filter_params = builder.build_joins( + [ + {'name': 'orders', 'filters': []}, + {'name': 'payments', 'filters': []}, + {'name': 'items', 'filters': []}, + ], + ['customer_id', 'order_id', 'item_id'], + 'customers', + ) + expected_sql = ( + 'JOIN orders\n' + ' ON customers.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.order_id = payments.order_id\n' + 'JOIN items\n' + ' ON payments.item_id = items.item_id' + ) + assert join_sql == expected_sql + assert table_aliases == ['orders', 'payments', 'items'] + assert where_clause == '' + assert filter_params == {} + + def test_build_joins_with_filters(self): + """Test building joins with filters""" + builder = JoinBuilder() + join_sql, table_aliases, where_clause, filter_params = builder.build_joins( + [{'name': 'orders', 'filters': ['id eq "test"']}], + ['customer_id'], + 'customers', + ) + expected_sql = 'JOIN orders\n ON customers.customer_id = orders.customer_id' + assert join_sql == expected_sql + assert table_aliases == ['orders'] + assert where_clause == 'orders.id = @orders_id_' + assert filter_params == {'orders_id_': 'test'} + + def test_build_joins_with_multiple_filters(self): + """Test building joins with multiple filters""" + builder = JoinBuilder() + join_sql, table_aliases, where_clause, filter_params = builder.build_joins( + [ + {'name': 'orders', 'filters': ['status eq "active"']}, + {'name': 'payments', 'filters': ['amount gt 100']}, + ], + ['customer_id', 'order_id'], + 'customers', + ) + expected_sql = ( + 'JOIN orders\n' + ' ON customers.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.order_id = payments.order_id' + ) + assert join_sql == expected_sql + assert table_aliases == ['orders', 'payments'] + assert ( + where_clause + == 'orders.status = 
@orders_status_ AND payments.amount > @payments_amount_' + ) + assert filter_params == {'orders_status_': 'active', 'payments_amount_': 100} + + def test_build_joins_backward_compatibility(self): + """Test building joins with string table names for backward compatibility""" + builder = JoinBuilder() + join_sql, table_aliases, where_clause, filter_params = builder.build_joins( + ['orders', 'payments'], ['customer_id', 'order_id'], 'customers' + ) + expected_sql = ( + 'JOIN orders\n' + ' ON customers.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.order_id = payments.order_id' + ) + assert join_sql == expected_sql + assert table_aliases == ['orders', 'payments'] + assert where_clause == '' + assert filter_params == {} + + +class TestSQLODataParserJoinOperations: + """Test cases for SQLODataParser join operations""" + + def test_prepare_odata_joins_empty_query(self): + """Test prepare_odata_joins with empty query""" + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins('', 'customers') + ) + assert sql_expr == '' + assert table_aliases == [] + assert where_clause == '' + assert filter_params == {} + + def test_prepare_odata_joins_none_query(self): + """Test prepare_odata_joins with None query""" + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins(None, 'customers') + ) + assert sql_expr == '' + assert table_aliases == [] + assert where_clause == '' + assert filter_params == {} + + def test_prepare_odata_joins_simple_expand(self): + """Test prepare_odata_joins with simple expand""" + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins('$expand=orders', 'customers') + ) + expected_sql = 'JOIN orders\n ON customers.id = orders.id' + assert sql_expr == expected_sql + assert table_aliases == ['orders'] + assert where_clause == '' + assert filter_params == {} + + def test_prepare_odata_joins_multiple_expand(self): + """Test prepare_odata_joins with multiple expand tables""" + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins('$expand=orders,payments', 'customers') + ) + expected_sql = ( + 'JOIN orders\n' + ' ON customers.id = orders.id\n' + 'JOIN payments\n' + ' ON orders.id = payments.id' + ) + assert sql_expr == expected_sql + assert table_aliases == ['orders', 'payments'] + assert where_clause == '' + assert filter_params == {} + + def test_prepare_odata_joins_with_join_columns(self): + """Test prepare_odata_joins with explicit join columns""" + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins( + '$expand=orders,payments&$join=customer_id,order_id', 'customers' + ) + ) + expected_sql = ( + 'JOIN orders\n' + ' ON customers.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.order_id = payments.order_id' + ) + assert sql_expr == expected_sql + assert table_aliases == ['orders', 'payments'] + assert where_clause == '' + assert filter_params == {} + + def test_prepare_odata_joins_nested_expand(self): + """Test prepare_odata_joins with nested expand""" + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins('$expand=orders($expand=payments)', 'customers') + ) + expected_sql = ( + 'JOIN orders\n' + ' ON customers.id = orders.id\n' + 'JOIN payments\n' + ' ON orders.id = payments.id' + ) + assert sql_expr == expected_sql + assert table_aliases == ['orders', 'payments'] + assert where_clause == '' + assert filter_params == {} + + def 
test_prepare_odata_joins_complex_nested(self): + """Test prepare_odata_joins with complex nested expand""" + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins( + '$expand=orders($expand=payments,items),customers', 'users' + ) + ) + expected_sql = ( + 'JOIN orders\n' + ' ON users.id = orders.id\n' + 'JOIN payments\n' + ' ON orders.id = payments.id\n' + 'JOIN items\n' + ' ON payments.id = items.id\n' + 'JOIN customers\n' + ' ON items.id = customers.id' + ) + assert sql_expr == expected_sql + assert table_aliases == ['orders', 'payments', 'items', 'customers'] + assert where_clause == '' + assert filter_params == {} + + def test_prepare_odata_joins_invalid_query(self): + """Test prepare_odata_joins with invalid query format""" + # Invalid queries should return empty results rather than raising errors + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins('invalid query format', 'customers') + ) + assert sql_expr == '' + assert table_aliases == [] + assert where_clause == '' + assert filter_params == {} + + def test_prepare_odata_joins_with_whitespace(self): + """Test prepare_odata_joins with whitespace in query""" + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins( + '$expand= orders , payments &$join= customer_id , order_id ', + 'customers', + ) + ) + expected_sql = ( + 'JOIN orders\n' + ' ON customers.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.order_id = payments.order_id' + ) + assert sql_expr == expected_sql + assert table_aliases == ['orders', 'payments'] + assert where_clause == '' + assert filter_params == {} + + def test_prepare_odata_joins_single_join_column(self): + """Test prepare_odata_joins with single join column for multiple tables""" + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins( + '$expand=orders,payments,items&$join=customer_id', 'customers' + ) + ) + expected_sql = ( + 'JOIN orders\n' + ' ON customers.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.customer_id = payments.customer_id\n' + 'JOIN items\n' + ' ON payments.customer_id = items.customer_id' + ) + assert sql_expr == expected_sql + assert table_aliases == ['orders', 'payments', 'items'] + assert where_clause == '' + assert filter_params == {} + + def test_prepare_odata_joins_rf_gold_item_details(self): + """Test prepare_odata_joins with rf_gold_item_details expand and specific join columns""" + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins( + '$expand=rf_gold_item_details&$join=id,gold_data_id', 'customers' + ) + ) + expected_sql = ( + 'JOIN rf_gold_item_details\n' + ' ON customers.id = rf_gold_item_details.gold_data_id' + ) + assert sql_expr == expected_sql + assert table_aliases == ['rf_gold_item_details'] + assert where_clause == '' + assert filter_params == {} + + def test_prepare_odata_joins_with_filter(self): + """Test prepare_odata_joins with filter in expand expression""" + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins( + "$expand=orders($filter=id eq 'test')&$join=customer_id", 'customers' + ) + ) + expected_sql = 'JOIN orders\n ON customers.customer_id = orders.customer_id' + assert sql_expr == expected_sql + assert table_aliases == ['orders'] + assert where_clause == 'orders.id = @orders_id_' + assert filter_params == {'orders_id_': 'test'} + + def test_prepare_odata_joins_with_multiple_filters(self): + """Test prepare_odata_joins 
with multiple filters in expand expressions""" + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins( + "$expand=orders($filter=status eq 'active'),payments($filter=amount gt 100)&$join=customer_id,order_id", + 'customers', + ) + ) + expected_sql = ( + 'JOIN orders\n' + ' ON customers.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.order_id = payments.order_id' + ) + assert sql_expr == expected_sql + assert table_aliases == ['orders', 'payments'] + assert ( + where_clause + == 'orders.status = @orders_status_ AND payments.amount > @payments_amount_' + ) + assert filter_params == {'orders_status_': 'active', 'payments_amount_': 100} + + def test_prepare_odata_joins_with_nested_filter_and_expand(self): + """Test prepare_odata_joins with nested filter and expand""" + sql_expr, table_aliases, where_clause, filter_params = ( + parser.prepare_odata_joins( + "$expand=orders($expand=payments($filter=status eq 'pending'))&$join=customer_id,order_id", + 'customers', + ) + ) + expected_sql = ( + 'JOIN orders\n' + ' ON customers.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.order_id = payments.order_id' + ) + assert sql_expr == expected_sql + assert table_aliases == ['orders', 'payments'] + assert where_clause == 'payments.status = @payments_status_' + assert filter_params == {'payments_status_': 'pending'} + + +class TestIntegrationJoinOperations: + """Integration tests for join operations""" + + def test_complete_odata_query_parsing(self): + """Test complete OData query parsing with joins""" + query = "$expand=orders($expand=payments),customers&$join=customer_id,order_id&$filter=status eq 'active'" + + # Test query parsing + lexer = Lexer(query) + query_parser = SQLFilterParser(lexer) + parsed = query_parser.parse_odata_query() + + assert parsed['expand_tables'] == ['orders', 'payments', 'customers'] + assert parsed['join'] == ['customer_id', 'order_id'] + + # Test join building + builder = JoinBuilder() + join_sql, table_aliases, where_clause, filter_params = builder.build_joins( + parsed['expand'], parsed['join'], 'users' + ) + + expected_sql = ( + 'JOIN orders\n' + ' ON users.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.order_id = payments.order_id\n' + 'JOIN customers\n' + ' ON payments.order_id = customers.order_id' + ) + assert join_sql == expected_sql + assert table_aliases == ['orders', 'payments', 'customers'] + assert where_clause == '' + assert filter_params == {} + + def test_parser_integration_with_joins(self): + """Test SQLODataParser integration with join operations""" + # Test filter and join together + filter_expr = "status eq 'active'" + join_query = '$expand=orders,payments&$join=customer_id,order_id' + + # Parse filter + sql_filter, filter_params = parser.prepare_odata_filter(filter_expr) + assert sql_filter == 'status = @status' + assert filter_params == {'status': 'active'} + + # Parse joins + join_sql, table_aliases, where_clause, join_filter_params = ( + parser.prepare_odata_joins(join_query, 'customers') + ) + expected_join_sql = ( + 'JOIN orders\n' + ' ON customers.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.order_id = payments.order_id' + ) + assert join_sql == expected_join_sql + assert table_aliases == ['orders', 'payments'] + assert where_clause == '' + assert join_filter_params == {} + + def test_parser_integration_with_joins_and_filters(self): + """Test SQLODataParser integration with join operations and filters""" + # Test filter and join 
together with filters in expand + filter_expr = "status eq 'active'" + join_query = "$expand=orders($filter=id eq 'test'),payments($filter=amount gt 100)&$join=customer_id,order_id" + + # Parse filter + sql_filter, filter_params = parser.prepare_odata_filter(filter_expr) + assert sql_filter == 'status = @status' + assert filter_params == {'status': 'active'} + + # Parse joins with filters + join_sql, table_aliases, where_clause, join_filter_params = ( + parser.prepare_odata_joins(join_query, 'customers') + ) + expected_join_sql = ( + 'JOIN orders\n' + ' ON customers.customer_id = orders.customer_id\n' + 'JOIN payments\n' + ' ON orders.order_id = payments.order_id' + ) + assert join_sql == expected_join_sql + assert table_aliases == ['orders', 'payments'] + assert ( + where_clause + == 'orders.id = @orders_id_ AND payments.amount > @payments_amount_' + ) + assert join_filter_params == {'orders_id_': 'test', 'payments_amount_': 100} + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/wavefront/server/plugins/datasource/tests/test_odata_parser_lex.py b/wavefront/server/plugins/datasource/tests/test_odata_parser_lex.py new file mode 100644 index 00000000..739afef5 --- /dev/null +++ b/wavefront/server/plugins/datasource/tests/test_odata_parser_lex.py @@ -0,0 +1,256 @@ +# #!/usr/bin/env python3 +# """ +# Test script for the new grammar-based OData parser +# """ + +from datasource.odata_parser import ODataQueryParser + +from datetime import datetime +import os +import pytest + + +def fill_odata_query(sql_expr: str, parameters: dict = {}) -> str: + output_sql = sql_expr + dynamic_var_char = '@' + param_names = sorted(parameters.keys(), key=len, reverse=True) + for parameter in param_names: + if isinstance(parameters[parameter], str): + output_sql = output_sql.replace( + f'{dynamic_var_char}{parameter}', f"'{parameters[parameter]}'" + ) + if isinstance(parameters[parameter], int): + output_sql = output_sql.replace( + f'{dynamic_var_char}{parameter}', str(parameters[parameter]) + ) + + return output_sql + + +parser = ODataQueryParser(type='sql') + + +# Set cloud provider for testing +os.environ['CLOUD_PROVIDER'] = 'gcp' + + +def test_basic_equality_filter(): + filter_expr = "name eq 'John'" + expected_sql = 'name = @name' + expected_params = {'name': 'John'} + sql_expr, params = parser.prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_equality_filter_with_quotes(): + filter_expr = "branch eq 'Agar - (MP) 5323'" + expected_sql = 'branch = @branch' + expected_params = {'branch': 'Agar - (MP) 5323'} + sql_expr, params = parser.prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_numeric_comparison(): + filter_expr = 'age gt 25' + expected_sql = 'age > @age' + expected_params = {'age': 25} + sql_expr, params = parser.prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_date_comparison(): + filter_expr = 'created_at gt 2024-01-01T00:00:00' + expected_sql = 'created_at > @created_at' + expected_params = {'created_at': datetime(2024, 1, 1, 0, 0)} + sql_expr, params = parser.prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params + + +def test_contains_operator(): + filter_expr = "description contains 'test'" + expected_sql = 'description LIKE @description' + expected_params = {'description': '%test%'} + sql_expr, params = 
+
+
+def test_contains_operator():
+    filter_expr = "description contains 'test'"
+    expected_sql = 'description LIKE @description'
+    expected_params = {'description': '%test%'}
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+    assert params == expected_params
+
+
+def test_in_operator():
+    filter_expr = "status in ['active', 'pending']"
+    expected_sql = 'status IN (@status_0, @status_1)'
+    expected_params = {'status_0': 'active', 'status_1': 'pending'}
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+    assert params == expected_params
+
+
+def test_complex_and_condition():
+    filter_expr = "age gt 25 $and status eq 'active'"
+    expected_sql = 'age > @age AND status = @status'
+    expected_params = {'age': 25, 'status': 'active'}
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+    assert params == expected_params
+
+
+def test_complex_or_condition():
+    filter_expr = "status eq 'active' $or status eq 'pending'"
+    expected_sql = 'status = @status OR status = @status_1'
+    expected_params = {'status': 'active', 'status_1': 'pending'}
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+    assert params == expected_params
+
+
+def test_complex_or_condition_with_quotes():
+    filter_expr = "(branch eq 'Agar - (MP) 5323' $or created_at gt 2025-05-04T05:59:56)"
+    expected_sql = '(branch = @branch OR created_at > @created_at)'
+    expected_params = {
+        'branch': 'Agar - (MP) 5323',
+        'created_at': datetime(2025, 5, 4, 5, 59, 56),
+    }
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+    assert params == expected_params
+
+
+def test_empty_filter():
+    sql_expr, params = parser.prepare_odata_filter('')
+    assert sql_expr is None
+    assert params is None
+
+
+def test_invalid_operator():
+    with pytest.raises(ValueError, match='Expected operator, got TokenType.FIELD'):
+        parser.prepare_odata_filter("name invalid_op 'John'")
+
+
+def test_invalid_filter_format():
+    with pytest.raises(ValueError, match='Expected operator, got TokenType.FIELD'):
+        parser.prepare_odata_filter('invalid filter format')
+
+
+def test_multiple_conditions_with_same_field():
+    filter_expr = "status eq 'active' $and status eq 'pending'"
+    expected_sql = 'status = @status AND status = @status_1'
+    expected_params = {'status': 'active', 'status_1': 'pending'}
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+    assert params == expected_params
+
+
+def test_less_than_or_equal():
+    filter_expr = 'age lte 30'
+    expected_sql = 'age <= @age'
+    expected_params = {'age': 30}
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+    assert params == expected_params
+
+
+def test_greater_than_or_equal():
+    filter_expr = 'age gte 18'
+    expected_sql = 'age >= @age'
+    expected_params = {'age': 18}
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+    assert params == expected_params
+
+
+def test_greater_than_or_equal_filling():
+    filter_expr = 'age gte 18'
+    expected_sql = 'age >= 18'
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    fill_odata = fill_odata_query(sql_expr, params)
+    assert fill_odata == expected_sql
+
+
+def test_multiple_conditions_with_same_field_filling():
+    filter_expr = "status eq 'active' $and status eq 'pending'"
+    expected_sql = "status = 'active' AND status = 'pending'"
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    fill_odata = fill_odata_query(sql_expr, params)
+    assert fill_odata == expected_sql
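+
+
+# Editor's note: the two *_filling tests above illustrate the intended
+# round-trip. A minimal sketch, with values taken directly from those tests:
+#
+#   sql_expr, params = parser.prepare_odata_filter('age gte 18')
+#   # sql_expr == 'age >= @age', params == {'age': 18}
+#   fill_odata_query(sql_expr, params)  # -> 'age >= 18'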
+
+
+def test_multiple_conditions_with_loan_amt():
+    filter_expr = "created_at gt 2025-07-23T07:42:44 $and (loan_id contains '96444' $or branch contains '96444' $or region contains '96444' $or zone contains '96444' $or loan_amount eq '96444')"
+    expected_sql = "created_at > @created_at AND (loan_id LIKE '%96444%' OR branch LIKE '%96444%' OR region LIKE '%96444%' OR zone LIKE '%96444%' OR loan_amount = '96444')"
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    fill_odata = fill_odata_query(sql_expr, params)
+    assert fill_odata == expected_sql
+
+
+def test_multiple_conditions_with_contains():
+    filter_expr = '(loan_amount gt 50000 $and loan_amount lt 100000 $or loan_amount gt 100000 $and loan_amount lt 250000 $or loan_amount gt 250000 $and loan_amount lt 500000 $or loan_amount gt 500000) $and created_at gt 2025-07-23T08:03:37'
+    expected_sql = '(loan_amount > @loan_amount AND loan_amount < @loan_amount_1 OR loan_amount > @loan_amount_2 AND loan_amount < @loan_amount_3 OR loan_amount > @loan_amount_4 AND loan_amount < @loan_amount_5 OR loan_amount > @loan_amount_6) AND created_at > @created_at'
+    sql_expr, _ = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+
+
+def test_multiple_conditions_with_float():
+    filter_expr = '(gold_purity gt 91.67) $and created_at gt 2025-07-23T10:07:03'
+    expected_sql = '(gold_purity > @gold_purity) AND created_at > @created_at'
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert params['gold_purity'] == 91.67
+    assert sql_expr == expected_sql
+
+
+def test_join_filter():
+    filter_expr = 'a.id eq 1 $and b.customer_id eq 2'
+    expected_sql = 'a.id = @a_id AND b.customer_id = @b_customer_id'
+    expected_params = {'a_id': 1, 'b_customer_id': 2}
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+    assert params == expected_params
+
+
+def test_join_filter_with_string_values():
+    filter_expr = "a.name eq 'John' $and b.status eq 'active'"
+    expected_sql = 'a.name = @a_name AND b.status = @b_status'
+    expected_params = {'a_name': 'John', 'b_status': 'active'}
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+    assert params == expected_params
+
+
+def test_join_filter_with_multiple_conditions():
+    filter_expr = 'a.id eq 1 $and b.customer_id eq 2 $and c.order_id eq 3'
+    expected_sql = (
+        'a.id = @a_id AND b.customer_id = @b_customer_id AND c.order_id = @c_order_id'
+    )
+    expected_params = {'a_id': 1, 'b_customer_id': 2, 'c_order_id': 3}
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+    assert params == expected_params
+
+
+def test_join_filter_with_contains_operator():
+    filter_expr = "a.name contains 'John' $and b.description contains 'test'"
+    expected_sql = 'a.name LIKE @a_name AND b.description LIKE @b_description'
+    expected_params = {'a_name': '%John%', 'b_description': '%test%'}
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+    assert params == expected_params
+
+
+def test_join_filter_with_comparison_operators():
+    filter_expr = 'a.age gt 25 $and b.salary lt 50000'
+    expected_sql = 'a.age > @a_age AND b.salary < @b_salary'
+    expected_params = {'a_age': 25, 'b_salary': 50000}
+    sql_expr, params = parser.prepare_odata_filter(filter_expr)
+    assert sql_expr == expected_sql
+    assert params == expected_params
+
+
+def test_join_filter_with_same_field_different_tables():
+    filter_expr = 'a.id eq 1 $and b.id eq 2'
+    expected_sql = 'a.id 
= @a_id AND b.id = @b_id' + expected_params = {'a_id': 1, 'b_id': 2} + sql_expr, params = parser.prepare_odata_filter(filter_expr) + assert sql_expr == expected_sql + assert params == expected_params diff --git a/wavefront/server/pyproject.toml b/wavefront/server/pyproject.toml new file mode 100644 index 00000000..b5df56d4 --- /dev/null +++ b/wavefront/server/pyproject.toml @@ -0,0 +1,35 @@ +[project] +name = "wavefront" +version = "0.1.0" +description = "Monorepo backend for wavefront apps" +readme = "README.md" +requires-python = ">=3.11" +dependencies = [ +] + +[dependency-groups] +dev = [ + "pre-commit>=4.2.0", + "ty>=0.0.1a28", +] + +[tool.uv.workspace] +members = [ + "apps/*", + "modules/*", + "misc/*", + "packages/*", + "plugins/*", + "background_jobs/*" +] + +[tool.uv] +required-version = ">=0.7.3" +override-dependencies = [ + "torch>=2.6.0 ; sys_platform != 'darwin'", +] + +[tool.pytest.ini_options] +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" +addopts = "--ignore=background_jobs/cadenceflo/test --ignore=background_jobs/auraflo/tests" diff --git a/wavefront/server/uv.lock b/wavefront/server/uv.lock new file mode 100644 index 00000000..779fad4b --- /dev/null +++ b/wavefront/server/uv.lock @@ -0,0 +1,6404 @@ +version = 1 +revision = 2 +requires-python = ">=3.11" +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'darwin'", + "python_full_version == '3.13.*' and sys_platform == 'darwin'", + "python_full_version >= '3.14' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "python_full_version == '3.13.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version >= '3.14' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform != 'darwin' and sys_platform != 'linux')", + "(python_full_version == '3.13.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.13.*' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version == '3.12.*' and sys_platform == 'darwin'", + "python_full_version == '3.12.*' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version == '3.12.*' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version == '3.12.*' and sys_platform != 'darwin' and sys_platform != 'linux')", + "python_full_version < '3.12' and sys_platform == 'darwin'", + "python_full_version < '3.12' and platform_machine == 'aarch64' and sys_platform == 'linux'", + "(python_full_version < '3.12' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version < '3.12' and sys_platform != 'darwin' and sys_platform != 'linux')", +] + +[manifest] +members = [ + "agents-module", + "api-services-module", + "auth-module", + "authenticator", + "call-processing", + "common-module", + "datasource", + "db-repo-module", + "flo-cloud", + "flo-utils", + "floconsole", + "floware", + "gold-module", + "image-search-module", + "inference-app", + "inference-module", + "insights-module", + "knowledge-base-module", + "llm-inference-config-module", + "plugins-module", + "product-analysis-module", + "rag-ingestion", + "tools-module", + "user-management-module", + "voice-agents-module", + "wavefront", + "workflow-job", +] +overrides = [{ name = "torch", marker = "sys_platform != 'darwin'", specifier = ">=2.6.0" }] + +[[package]] +name = "accelerate" +version = "0.34.2" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "huggingface-hub" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "psutil" }, + { name = "pyyaml" }, + { name = "safetensors" }, + { name = "torch", marker = "sys_platform != 'darwin'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/82/5712c44a5a5ef7c4d375363b179a099e834491221ece02e81a1209b08233/accelerate-0.34.2.tar.gz", hash = "sha256:98c1ebe1f5a45c0a3af02dc60b5bb8b7d58d60c3326a326a06ce6d956b18ca5b", size = 328806, upload-time = "2024-09-05T16:45:20.816Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/5e/80cee674cdbe529ef008721d7eebb50ae5def4314211d82123aa23e828f8/accelerate-0.34.2-py3-none-any.whl", hash = "sha256:d69159e2c4e4a473d14443b27d2d732929254e826b3ab4813b3785b5ac616c7c", size = 324366, upload-time = "2024-09-05T16:45:17.121Z" }, +] + +[[package]] +name = "aenum" +version = "3.1.16" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/52/6ad8f63ec8da1bf40f96996d25d5b650fdd38f5975f8c813732c47388f18/aenum-3.1.16-py3-none-any.whl", hash = "sha256:9035092855a98e41b66e3d0998bd7b96280e85ceb3a04cc035636138a1943eaf", size = 165627, upload-time = "2025-04-25T03:17:58.89Z" }, +] + +[[package]] +name = "agents-module" +version = "0.1.0" +source = { editable = "modules/agents_module" } +dependencies = [ + { name = "api-services-module" }, + { name = "common-module" }, + { name = "flo-ai" }, + { name = "flo-cloud" }, + { name = "flo-utils" }, + { name = "tools-module" }, +] + +[package.metadata] +requires-dist = [ + { name = "api-services-module", editable = "modules/api_services_module" }, + { name = "common-module", editable = "modules/common_module" }, + { name = "flo-ai", specifier = ">=1.1.0rc5" }, + { name = "flo-cloud", editable = "packages/flo_cloud" }, + { name = "flo-utils", editable = "packages/flo_utils" }, + { name = "tools-module", editable = "modules/tools_module" }, +] + +[[package]] +name = "aiofiles" +version = "24.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247, upload-time = "2024-06-24T11:02:03.584Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896, upload-time = "2024-06-24T11:02:01.529Z" }, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.12.15" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = 
"aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9b/e7/d92a237d8802ca88483906c388f7c201bbe96cd80a165ffd0ac2f6a8d59f/aiohttp-3.12.15.tar.gz", hash = "sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2", size = 7823716, upload-time = "2025-07-29T05:52:32.215Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/19/9e86722ec8e835959bd97ce8c1efa78cf361fa4531fca372551abcc9cdd6/aiohttp-3.12.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117", size = 711246, upload-time = "2025-07-29T05:50:15.937Z" }, + { url = "https://files.pythonhosted.org/packages/71/f9/0a31fcb1a7d4629ac9d8f01f1cb9242e2f9943f47f5d03215af91c3c1a26/aiohttp-3.12.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe", size = 483515, upload-time = "2025-07-29T05:50:17.442Z" }, + { url = "https://files.pythonhosted.org/packages/62/6c/94846f576f1d11df0c2e41d3001000527c0fdf63fce7e69b3927a731325d/aiohttp-3.12.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9", size = 471776, upload-time = "2025-07-29T05:50:19.568Z" }, + { url = "https://files.pythonhosted.org/packages/f8/6c/f766d0aaafcee0447fad0328da780d344489c042e25cd58fde566bf40aed/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5", size = 1741977, upload-time = "2025-07-29T05:50:21.665Z" }, + { url = "https://files.pythonhosted.org/packages/17/e5/fb779a05ba6ff44d7bc1e9d24c644e876bfff5abe5454f7b854cace1b9cc/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728", size = 1690645, upload-time = "2025-07-29T05:50:23.333Z" }, + { url = "https://files.pythonhosted.org/packages/37/4e/a22e799c2035f5d6a4ad2cf8e7c1d1bd0923192871dd6e367dafb158b14c/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16", size = 1789437, upload-time = "2025-07-29T05:50:25.007Z" }, + { url = "https://files.pythonhosted.org/packages/28/e5/55a33b991f6433569babb56018b2fb8fb9146424f8b3a0c8ecca80556762/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0", size = 1828482, upload-time = "2025-07-29T05:50:26.693Z" }, + { url = "https://files.pythonhosted.org/packages/c6/82/1ddf0ea4f2f3afe79dffed5e8a246737cff6cbe781887a6a170299e33204/aiohttp-3.12.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b", size = 1730944, upload-time = "2025-07-29T05:50:28.382Z" }, + { url = "https://files.pythonhosted.org/packages/1b/96/784c785674117b4cb3877522a177ba1b5e4db9ce0fd519430b5de76eec90/aiohttp-3.12.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd", size = 1668020, upload-time = "2025-07-29T05:50:30.032Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/8a/8b75f203ea7e5c21c0920d84dd24a5c0e971fe1e9b9ebbf29ae7e8e39790/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8", size = 1716292, upload-time = "2025-07-29T05:50:31.983Z" }, + { url = "https://files.pythonhosted.org/packages/47/0b/a1451543475bb6b86a5cfc27861e52b14085ae232896a2654ff1231c0992/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50", size = 1711451, upload-time = "2025-07-29T05:50:33.989Z" }, + { url = "https://files.pythonhosted.org/packages/55/fd/793a23a197cc2f0d29188805cfc93aa613407f07e5f9da5cd1366afd9d7c/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676", size = 1691634, upload-time = "2025-07-29T05:50:35.846Z" }, + { url = "https://files.pythonhosted.org/packages/ca/bf/23a335a6670b5f5dfc6d268328e55a22651b440fca341a64fccf1eada0c6/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7", size = 1785238, upload-time = "2025-07-29T05:50:37.597Z" }, + { url = "https://files.pythonhosted.org/packages/57/4f/ed60a591839a9d85d40694aba5cef86dde9ee51ce6cca0bb30d6eb1581e7/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7", size = 1805701, upload-time = "2025-07-29T05:50:39.591Z" }, + { url = "https://files.pythonhosted.org/packages/85/e0/444747a9455c5de188c0f4a0173ee701e2e325d4b2550e9af84abb20cdba/aiohttp-3.12.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685", size = 1718758, upload-time = "2025-07-29T05:50:41.292Z" }, + { url = "https://files.pythonhosted.org/packages/36/ab/1006278d1ffd13a698e5dd4bfa01e5878f6bddefc296c8b62649753ff249/aiohttp-3.12.15-cp311-cp311-win32.whl", hash = "sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b", size = 428868, upload-time = "2025-07-29T05:50:43.063Z" }, + { url = "https://files.pythonhosted.org/packages/10/97/ad2b18700708452400278039272032170246a1bf8ec5d832772372c71f1a/aiohttp-3.12.15-cp311-cp311-win_amd64.whl", hash = "sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d", size = 453273, upload-time = "2025-07-29T05:50:44.613Z" }, + { url = "https://files.pythonhosted.org/packages/63/97/77cb2450d9b35f517d6cf506256bf4f5bda3f93a66b4ad64ba7fc917899c/aiohttp-3.12.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7", size = 702333, upload-time = "2025-07-29T05:50:46.507Z" }, + { url = "https://files.pythonhosted.org/packages/83/6d/0544e6b08b748682c30b9f65640d006e51f90763b41d7c546693bc22900d/aiohttp-3.12.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444", size = 476948, upload-time = "2025-07-29T05:50:48.067Z" }, + { url = "https://files.pythonhosted.org/packages/3a/1d/c8c40e611e5094330284b1aea8a4b02ca0858f8458614fa35754cab42b9c/aiohttp-3.12.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d", size = 469787, upload-time = "2025-07-29T05:50:49.669Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/7d/b76438e70319796bfff717f325d97ce2e9310f752a267bfdf5192ac6082b/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c", size = 1716590, upload-time = "2025-07-29T05:50:51.368Z" }, + { url = "https://files.pythonhosted.org/packages/79/b1/60370d70cdf8b269ee1444b390cbd72ce514f0d1cd1a715821c784d272c9/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0", size = 1699241, upload-time = "2025-07-29T05:50:53.628Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2b/4968a7b8792437ebc12186db31523f541943e99bda8f30335c482bea6879/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab", size = 1754335, upload-time = "2025-07-29T05:50:55.394Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c1/49524ed553f9a0bec1a11fac09e790f49ff669bcd14164f9fab608831c4d/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb", size = 1800491, upload-time = "2025-07-29T05:50:57.202Z" }, + { url = "https://files.pythonhosted.org/packages/de/5e/3bf5acea47a96a28c121b167f5ef659cf71208b19e52a88cdfa5c37f1fcc/aiohttp-3.12.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545", size = 1719929, upload-time = "2025-07-29T05:50:59.192Z" }, + { url = "https://files.pythonhosted.org/packages/39/94/8ae30b806835bcd1cba799ba35347dee6961a11bd507db634516210e91d8/aiohttp-3.12.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c", size = 1635733, upload-time = "2025-07-29T05:51:01.394Z" }, + { url = "https://files.pythonhosted.org/packages/7a/46/06cdef71dd03acd9da7f51ab3a9107318aee12ad38d273f654e4f981583a/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd", size = 1696790, upload-time = "2025-07-29T05:51:03.657Z" }, + { url = "https://files.pythonhosted.org/packages/02/90/6b4cfaaf92ed98d0ec4d173e78b99b4b1a7551250be8937d9d67ecb356b4/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f", size = 1718245, upload-time = "2025-07-29T05:51:05.911Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e6/2593751670fa06f080a846f37f112cbe6f873ba510d070136a6ed46117c6/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d", size = 1658899, upload-time = "2025-07-29T05:51:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/8f/28/c15bacbdb8b8eb5bf39b10680d129ea7410b859e379b03190f02fa104ffd/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519", size = 1738459, upload-time = "2025-07-29T05:51:09.56Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/c269cbc4faa01fb10f143b1670633a8ddd5b2e1ffd0548f7aa49cb5c70e2/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea", size = 1766434, upload-time = "2025-07-29T05:51:11.423Z" }, + { url = "https://files.pythonhosted.org/packages/52/b0/4ff3abd81aa7d929b27d2e1403722a65fc87b763e3a97b3a2a494bfc63bc/aiohttp-3.12.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3", size = 1726045, upload-time = "2025-07-29T05:51:13.689Z" }, + { url = "https://files.pythonhosted.org/packages/71/16/949225a6a2dd6efcbd855fbd90cf476052e648fb011aa538e3b15b89a57a/aiohttp-3.12.15-cp312-cp312-win32.whl", hash = "sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1", size = 423591, upload-time = "2025-07-29T05:51:15.452Z" }, + { url = "https://files.pythonhosted.org/packages/2b/d8/fa65d2a349fe938b76d309db1a56a75c4fb8cc7b17a398b698488a939903/aiohttp-3.12.15-cp312-cp312-win_amd64.whl", hash = "sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34", size = 450266, upload-time = "2025-07-29T05:51:17.239Z" }, + { url = "https://files.pythonhosted.org/packages/f2/33/918091abcf102e39d15aba2476ad9e7bd35ddb190dcdd43a854000d3da0d/aiohttp-3.12.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315", size = 696741, upload-time = "2025-07-29T05:51:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2a/7495a81e39a998e400f3ecdd44a62107254803d1681d9189be5c2e4530cd/aiohttp-3.12.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd", size = 474407, upload-time = "2025-07-29T05:51:21.165Z" }, + { url = "https://files.pythonhosted.org/packages/49/fc/a9576ab4be2dcbd0f73ee8675d16c707cfc12d5ee80ccf4015ba543480c9/aiohttp-3.12.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4", size = 466703, upload-time = "2025-07-29T05:51:22.948Z" }, + { url = "https://files.pythonhosted.org/packages/09/2f/d4bcc8448cf536b2b54eed48f19682031ad182faa3a3fee54ebe5b156387/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7", size = 1705532, upload-time = "2025-07-29T05:51:25.211Z" }, + { url = "https://files.pythonhosted.org/packages/f1/f3/59406396083f8b489261e3c011aa8aee9df360a96ac8fa5c2e7e1b8f0466/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d", size = 1686794, upload-time = "2025-07-29T05:51:27.145Z" }, + { url = "https://files.pythonhosted.org/packages/dc/71/164d194993a8d114ee5656c3b7ae9c12ceee7040d076bf7b32fb98a8c5c6/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b", size = 1738865, upload-time = "2025-07-29T05:51:29.366Z" }, + { url = "https://files.pythonhosted.org/packages/1c/00/d198461b699188a93ead39cb458554d9f0f69879b95078dce416d3209b54/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d", size = 1788238, upload-time = "2025-07-29T05:51:31.285Z" }, + { url = 
"https://files.pythonhosted.org/packages/85/b8/9e7175e1fa0ac8e56baa83bf3c214823ce250d0028955dfb23f43d5e61fd/aiohttp-3.12.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d", size = 1710566, upload-time = "2025-07-29T05:51:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/59/e4/16a8eac9df39b48ae102ec030fa9f726d3570732e46ba0c592aeeb507b93/aiohttp-3.12.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645", size = 1624270, upload-time = "2025-07-29T05:51:35.195Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f8/cd84dee7b6ace0740908fd0af170f9fab50c2a41ccbc3806aabcb1050141/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461", size = 1677294, upload-time = "2025-07-29T05:51:37.215Z" }, + { url = "https://files.pythonhosted.org/packages/ce/42/d0f1f85e50d401eccd12bf85c46ba84f947a84839c8a1c2c5f6e8ab1eb50/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9", size = 1708958, upload-time = "2025-07-29T05:51:39.328Z" }, + { url = "https://files.pythonhosted.org/packages/d5/6b/f6fa6c5790fb602538483aa5a1b86fcbad66244997e5230d88f9412ef24c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d", size = 1651553, upload-time = "2025-07-29T05:51:41.356Z" }, + { url = "https://files.pythonhosted.org/packages/04/36/a6d36ad545fa12e61d11d1932eef273928b0495e6a576eb2af04297fdd3c/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693", size = 1727688, upload-time = "2025-07-29T05:51:43.452Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c8/f195e5e06608a97a4e52c5d41c7927301bf757a8e8bb5bbf8cef6c314961/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64", size = 1761157, upload-time = "2025-07-29T05:51:45.643Z" }, + { url = "https://files.pythonhosted.org/packages/05/6a/ea199e61b67f25ba688d3ce93f63b49b0a4e3b3d380f03971b4646412fc6/aiohttp-3.12.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51", size = 1710050, upload-time = "2025-07-29T05:51:48.203Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2e/ffeb7f6256b33635c29dbed29a22a723ff2dd7401fff42ea60cf2060abfb/aiohttp-3.12.15-cp313-cp313-win32.whl", hash = "sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0", size = 422647, upload-time = "2025-07-29T05:51:50.718Z" }, + { url = "https://files.pythonhosted.org/packages/1b/8e/78ee35774201f38d5e1ba079c9958f7629b1fd079459aea9467441dbfbf5/aiohttp-3.12.15-cp313-cp313-win_amd64.whl", hash = "sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84", size = 449067, upload-time = "2025-07-29T05:51:52.549Z" }, +] + +[[package]] +name = "aiohttp-retry" +version = "2.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/61/ebda4d8e3d8cfa1fd3db0fb428db2dd7461d5742cea35178277ad180b033/aiohttp_retry-2.9.1.tar.gz", hash = 
"sha256:8eb75e904ed4ee5c2ec242fefe85bf04240f685391c4879d8f541d6028ff01f1", size = 13608, upload-time = "2024-11-06T10:44:54.574Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/99/84ba7273339d0f3dfa57901b846489d2e5c2cd731470167757f1935fffbd/aiohttp_retry-2.9.1-py3-none-any.whl", hash = "sha256:66d2759d1921838256a05a3f80ad7e724936f083e35be5abb5e16eed6be6dc54", size = 9981, upload-time = "2024-11-06T10:44:52.917Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + +[[package]] +name = "alembic" +version = "1.16.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mako" }, + { name = "sqlalchemy" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9a/ca/4dc52902cf3491892d464f5265a81e9dff094692c8a049a3ed6a05fe7ee8/alembic-1.16.5.tar.gz", hash = "sha256:a88bb7f6e513bd4301ecf4c7f2206fe93f9913f9b48dac3b78babde2d6fe765e", size = 1969868, upload-time = "2025-08-27T18:02:05.668Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/4a/4c61d4c84cfd9befb6fa08a702535b27b21fff08c946bc2f6139decbf7f7/alembic-1.16.5-py3-none-any.whl", hash = "sha256:e845dfe090c5ffa7b92593ae6687c5cb1a101e91fa53868497dbd79847f9dbe3", size = 247355, upload-time = "2025-08-27T18:02:07.37Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anthropic" +version = "0.57.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d7/75/6261a1a8d92aed47e27d2fcfb3a411af73b1435e6ae1186da02b760565d0/anthropic-0.57.1.tar.gz", hash = "sha256:7815dd92245a70d21f65f356f33fc80c5072eada87fb49437767ea2918b2c4b0", size = 423775, upload-time = "2025-07-03T16:57:35.932Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/cf/ca0ba77805aec6171629a8b665c7dc224dab374539c3d27005b5d8c100a0/anthropic-0.57.1-py3-none-any.whl", hash = 
"sha256:33afc1f395af207d07ff1bffc0a3d1caac53c371793792569c5d2f09283ea306", size = 292779, upload-time = "2025-07-03T16:57:34.636Z" }, +] + +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + +[[package]] +name = "api-services-module" +version = "0.1.0" +source = { editable = "modules/api_services_module" } +dependencies = [ + { name = "common-module" }, + { name = "db-repo-module" }, + { name = "dependency-injector" }, + { name = "fastapi" }, + { name = "httpx" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[package.metadata] +requires-dist = [ + { name = "common-module", editable = "modules/common_module" }, + { name = "db-repo-module", editable = "modules/db_repo_module" }, + { name = "dependency-injector", specifier = ">=4.41.0" }, + { name = "fastapi", specifier = ">=0.104.0" }, + { name = "httpx", specifier = ">=0.25.0" }, + { name = "pydantic", specifier = ">=2.0.0" }, + { name = "pyyaml", specifier = ">=6.0.1" }, + { name = "uvicorn", extras = ["standard"], specifier = ">=0.24.0" }, +] + +[[package]] +name = "apscheduler" +version = "3.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzlocal" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4e/00/6d6814ddc19be2df62c8c898c4df6b5b1914f3bd024b780028caa392d186/apscheduler-3.11.0.tar.gz", hash = "sha256:4c622d250b0955a65d5d0eb91c33e6d43fd879834bf541e0a18661ae60460133", size = 107347, upload-time = "2024-11-24T19:39:26.463Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/ae/9a053dd9229c0fde6b1f1f33f609ccff1ee79ddda364c756a924c6d8563b/APScheduler-3.11.0-py3-none-any.whl", hash = "sha256:fc134ca32e50f5eadcc4938e3a4545ab19131435e851abb40b34d63d5141c6da", size = 64004, upload-time = "2024-11-24T19:39:24.442Z" }, +] + +[[package]] +name = "argcomplete" +version = "1.10.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/28/07d2cfe0838f998ea2eafab59f52b0ceb1e70adb1831fa14b958a9fa6c5c/argcomplete-1.10.3.tar.gz", hash = "sha256:a37f522cf3b6a34abddfedb61c4546f60023b3799b22d1cd971eacdc0861530a", size = 50173, upload-time = "2019-11-26T19:12:49.157Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/8e/6b293f883fdbd29b9c8170db44bddff9e7de224d8cf1eb4287f69f1766e5/argcomplete-1.10.3-py2.py3-none-any.whl", hash = "sha256:d8ea63ebaec7f59e56e7b2a386b1d1c7f1a7ae87902c9ee17d377eaa557f06fa", size = 36576, upload-time = "2019-11-26T19:12:46.646Z" }, +] + +[[package]] +name = "asn1crypto" +version = "1.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/de/cf/d547feed25b5244fcb9392e288ff9fdc3280b10260362fc45d37a798a6ee/asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c", size = 121080, upload-time = "2022-03-15T14:46:52.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/7f/09065fd9e27da0eda08b4d6897f1c13535066174cc023af248fc2a8d5e5a/asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67", size = 105045, upload-time = "2022-03-15T14:46:51.055Z" }, +] + +[[package]] +name = "async-lru" +version = "2.0.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/4d/71ec4d3939dc755264f680f6c2b4906423a304c3d18e96853f0a595dfe97/async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb", size = 10380, upload-time = "2025-03-16T17:25:36.919Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/49/d10027df9fce941cb8184e78a02857af36360d33e1721df81c5ed2179a1a/async_lru-2.0.5-py3-none-any.whl", hash = "sha256:ab95404d8d2605310d345932697371a5f40def0487c03d6d0ad9138de52c9943", size = 6069, upload-time = "2025-03-16T17:25:35.422Z" }, +] + +[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, +] + +[[package]] +name = "asyncpg" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746, upload-time = "2024-10-20T00:30:41.127Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/0e/f5d708add0d0b97446c402db7e8dd4c4183c13edaabe8a8500b411e7b495/asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a", size = 674506, upload-time = "2024-10-20T00:29:27.988Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a0/67ec9a75cb24a1d99f97b8437c8d56da40e6f6bd23b04e2f4ea5d5ad82ac/asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed", size = 645922, upload-time = "2024-10-20T00:29:29.391Z" }, + { url = "https://files.pythonhosted.org/packages/5c/d9/a7584f24174bd86ff1053b14bb841f9e714380c672f61c906eb01d8ec433/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a", size = 3079565, upload-time = "2024-10-20T00:29:30.832Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d7/a4c0f9660e333114bdb04d1a9ac70db690dd4ae003f34f691139a5cbdae3/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956", size = 3109962, upload-time = "2024-10-20T00:29:33.114Z" }, + { url = "https://files.pythonhosted.org/packages/3c/21/199fd16b5a981b1575923cbb5d9cf916fdc936b377e0423099f209e7e73d/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056", size = 3064791, upload-time = "2024-10-20T00:29:34.677Z" }, + { url = "https://files.pythonhosted.org/packages/77/52/0004809b3427534a0c9139c08c87b515f1c77a8376a50ae29f001e53962f/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454", size = 3188696, upload-time = "2024-10-20T00:29:36.389Z" }, + { url = "https://files.pythonhosted.org/packages/52/cb/fbad941cd466117be58b774a3f1cc9ecc659af625f028b163b1e646a55fe/asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d", size = 567358, upload-time = "2024-10-20T00:29:37.915Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0a/0a32307cf166d50e1ad120d9b81a33a948a1a5463ebfa5a96cc5606c0863/asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f", size = 629375, upload-time = "2024-10-20T00:29:39.987Z" }, + { url = "https://files.pythonhosted.org/packages/4b/64/9d3e887bb7b01535fdbc45fbd5f0a8447539833b97ee69ecdbb7a79d0cb4/asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e", size = 673162, upload-time = "2024-10-20T00:29:41.88Z" }, + { url = "https://files.pythonhosted.org/packages/6e/eb/8b236663f06984f212a087b3e849731f917ab80f84450e943900e8ca4052/asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a", size = 637025, upload-time = "2024-10-20T00:29:43.352Z" }, + { url = "https://files.pythonhosted.org/packages/cc/57/2dc240bb263d58786cfaa60920779af6e8d32da63ab9ffc09f8312bd7a14/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3", size = 3496243, upload-time = "2024-10-20T00:29:44.922Z" }, + { url = "https://files.pythonhosted.org/packages/f4/40/0ae9d061d278b10713ea9021ef6b703ec44698fe32178715a501ac696c6b/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737", size = 3575059, upload-time = "2024-10-20T00:29:46.891Z" }, + { url = "https://files.pythonhosted.org/packages/c3/75/d6b895a35a2c6506952247640178e5f768eeb28b2e20299b6a6f1d743ba0/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a", size = 3473596, upload-time = "2024-10-20T00:29:49.201Z" }, + { url = "https://files.pythonhosted.org/packages/c8/e7/3693392d3e168ab0aebb2d361431375bd22ffc7b4a586a0fc060d519fae7/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af", size = 3641632, upload-time = "2024-10-20T00:29:50.768Z" }, + { url = "https://files.pythonhosted.org/packages/32/ea/15670cea95745bba3f0352341db55f506a820b21c619ee66b7d12ea7867d/asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e", size = 560186, 
upload-time = "2024-10-20T00:29:52.394Z" }, + { url = "https://files.pythonhosted.org/packages/7e/6b/fe1fad5cee79ca5f5c27aed7bd95baee529c1bf8a387435c8ba4fe53d5c1/asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305", size = 621064, upload-time = "2024-10-20T00:29:53.757Z" }, + { url = "https://files.pythonhosted.org/packages/3a/22/e20602e1218dc07692acf70d5b902be820168d6282e69ef0d3cb920dc36f/asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70", size = 670373, upload-time = "2024-10-20T00:29:55.165Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b3/0cf269a9d647852a95c06eb00b815d0b95a4eb4b55aa2d6ba680971733b9/asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3", size = 634745, upload-time = "2024-10-20T00:29:57.14Z" }, + { url = "https://files.pythonhosted.org/packages/8e/6d/a4f31bf358ce8491d2a31bfe0d7bcf25269e80481e49de4d8616c4295a34/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33", size = 3512103, upload-time = "2024-10-20T00:29:58.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/19/139227a6e67f407b9c386cb594d9628c6c78c9024f26df87c912fabd4368/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4", size = 3592471, upload-time = "2024-10-20T00:30:00.354Z" }, + { url = "https://files.pythonhosted.org/packages/67/e4/ab3ca38f628f53f0fd28d3ff20edff1c975dd1cb22482e0061916b4b9a74/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4", size = 3496253, upload-time = "2024-10-20T00:30:02.794Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5f/0bf65511d4eeac3a1f41c54034a492515a707c6edbc642174ae79034d3ba/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba", size = 3662720, upload-time = "2024-10-20T00:30:04.501Z" }, + { url = "https://files.pythonhosted.org/packages/e7/31/1513d5a6412b98052c3ed9158d783b1e09d0910f51fbe0e05f56cc370bc4/asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590", size = 560404, upload-time = "2024-10-20T00:30:06.537Z" }, + { url = "https://files.pythonhosted.org/packages/c8/a4/cec76b3389c4c5ff66301cd100fe88c318563ec8a520e0b2e792b5b84972/asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e", size = 621623, upload-time = "2024-10-20T00:30:09.024Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = 
"2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "audioop-lts" +version = "0.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/3b/69ff8a885e4c1c42014c2765275c4bd91fe7bc9847e9d8543dbcbb09f820/audioop_lts-0.2.1.tar.gz", hash = "sha256:e81268da0baa880431b68b1308ab7257eb33f356e57a5f9b1f915dfb13dd1387", size = 30204, upload-time = "2024-08-04T21:14:43.957Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/91/a219253cc6e92db2ebeaf5cf8197f71d995df6f6b16091d1f3ce62cb169d/audioop_lts-0.2.1-cp313-abi3-macosx_10_13_universal2.whl", hash = "sha256:fd1345ae99e17e6910f47ce7d52673c6a1a70820d78b67de1b7abb3af29c426a", size = 46252, upload-time = "2024-08-04T21:13:56.209Z" }, + { url = "https://files.pythonhosted.org/packages/ec/f6/3cb21e0accd9e112d27cee3b1477cd04dafe88675c54ad8b0d56226c1e0b/audioop_lts-0.2.1-cp313-abi3-macosx_10_13_x86_64.whl", hash = "sha256:e175350da05d2087e12cea8e72a70a1a8b14a17e92ed2022952a4419689ede5e", size = 27183, upload-time = "2024-08-04T21:13:59.966Z" }, + { url = "https://files.pythonhosted.org/packages/ea/7e/f94c8a6a8b2571694375b4cf94d3e5e0f529e8e6ba280fad4d8c70621f27/audioop_lts-0.2.1-cp313-abi3-macosx_11_0_arm64.whl", hash = "sha256:4a8dd6a81770f6ecf019c4b6d659e000dc26571b273953cef7cd1d5ce2ff3ae6", size = 26726, upload-time = "2024-08-04T21:14:00.846Z" }, + { url = "https://files.pythonhosted.org/packages/ef/f8/a0e8e7a033b03fae2b16bc5aa48100b461c4f3a8a38af56d5ad579924a3a/audioop_lts-0.2.1-cp313-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1cd3c0b6f2ca25c7d2b1c3adeecbe23e65689839ba73331ebc7d893fcda7ffe", size = 80718, upload-time = "2024-08-04T21:14:01.989Z" }, + { url = "https://files.pythonhosted.org/packages/8f/ea/a98ebd4ed631c93b8b8f2368862cd8084d75c77a697248c24437c36a6f7e/audioop_lts-0.2.1-cp313-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff3f97b3372c97782e9c6d3d7fdbe83bce8f70de719605bd7ee1839cd1ab360a", size = 88326, upload-time = "2024-08-04T21:14:03.509Z" }, + { url = "https://files.pythonhosted.org/packages/33/79/e97a9f9daac0982aa92db1199339bd393594d9a4196ad95ae088635a105f/audioop_lts-0.2.1-cp313-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a351af79edefc2a1bd2234bfd8b339935f389209943043913a919df4b0f13300", size = 80539, upload-time = "2024-08-04T21:14:04.679Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d3/1051d80e6f2d6f4773f90c07e73743a1e19fcd31af58ff4e8ef0375d3a80/audioop_lts-0.2.1-cp313-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aeb6f96f7f6da80354330470b9134d81b4cf544cdd1c549f2f45fe964d28059", size = 78577, upload-time = "2024-08-04T21:14:09.038Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1d/54f4c58bae8dc8c64a75071c7e98e105ddaca35449376fcb0180f6e3c9df/audioop_lts-0.2.1-cp313-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c589f06407e8340e81962575fcffbba1e92671879a221186c3d4662de9fe804e", size = 82074, upload-time = "2024-08-04T21:14:09.99Z" }, + { url = "https://files.pythonhosted.org/packages/36/89/2e78daa7cebbea57e72c0e1927413be4db675548a537cfba6a19040d52fa/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fbae5d6925d7c26e712f0beda5ed69ebb40e14212c185d129b8dfbfcc335eb48", size = 84210, upload-time = "2024-08-04T21:14:11.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/57/3ff8a74df2ec2fa6d2ae06ac86e4a27d6412dbb7d0e0d41024222744c7e0/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_i686.whl", hash = "sha256:d2d5434717f33117f29b5691fbdf142d36573d751716249a288fbb96ba26a281", size = 85664, upload-time = "2024-08-04T21:14:12.394Z" }, + { url = "https://files.pythonhosted.org/packages/16/01/21cc4e5878f6edbc8e54be4c108d7cb9cb6202313cfe98e4ece6064580dd/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_ppc64le.whl", hash = "sha256:f626a01c0a186b08f7ff61431c01c055961ee28769591efa8800beadd27a2959", size = 93255, upload-time = "2024-08-04T21:14:13.707Z" }, + { url = "https://files.pythonhosted.org/packages/3e/28/7f7418c362a899ac3b0bf13b1fde2d4ffccfdeb6a859abd26f2d142a1d58/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_s390x.whl", hash = "sha256:05da64e73837f88ee5c6217d732d2584cf638003ac72df124740460531e95e47", size = 87760, upload-time = "2024-08-04T21:14:14.74Z" }, + { url = "https://files.pythonhosted.org/packages/6d/d8/577a8be87dc7dd2ba568895045cee7d32e81d85a7e44a29000fe02c4d9d4/audioop_lts-0.2.1-cp313-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:56b7a0a4dba8e353436f31a932f3045d108a67b5943b30f85a5563f4d8488d77", size = 84992, upload-time = "2024-08-04T21:14:19.155Z" }, + { url = "https://files.pythonhosted.org/packages/ef/9a/4699b0c4fcf89936d2bfb5425f55f1a8b86dff4237cfcc104946c9cd9858/audioop_lts-0.2.1-cp313-abi3-win32.whl", hash = "sha256:6e899eb8874dc2413b11926b5fb3857ec0ab55222840e38016a6ba2ea9b7d5e3", size = 26059, upload-time = "2024-08-04T21:14:20.438Z" }, + { url = "https://files.pythonhosted.org/packages/3a/1c/1f88e9c5dd4785a547ce5fd1eb83fff832c00cc0e15c04c1119b02582d06/audioop_lts-0.2.1-cp313-abi3-win_amd64.whl", hash = "sha256:64562c5c771fb0a8b6262829b9b4f37a7b886c01b4d3ecdbae1d629717db08b4", size = 30412, upload-time = "2024-08-04T21:14:21.342Z" }, + { url = "https://files.pythonhosted.org/packages/c4/e9/c123fd29d89a6402ad261516f848437472ccc602abb59bba522af45e281b/audioop_lts-0.2.1-cp313-abi3-win_arm64.whl", hash = "sha256:c45317debeb64002e980077642afbd977773a25fa3dfd7ed0c84dccfc1fafcb0", size = 23578, upload-time = "2024-08-04T21:14:22.193Z" }, + { url = "https://files.pythonhosted.org/packages/7a/99/bb664a99561fd4266687e5cb8965e6ec31ba4ff7002c3fce3dc5ef2709db/audioop_lts-0.2.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:3827e3fce6fee4d69d96a3d00cd2ab07f3c0d844cb1e44e26f719b34a5b15455", size = 46827, upload-time = "2024-08-04T21:14:23.034Z" }, + { url = "https://files.pythonhosted.org/packages/c4/e3/f664171e867e0768ab982715e744430cf323f1282eb2e11ebfb6ee4c4551/audioop_lts-0.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:161249db9343b3c9780ca92c0be0d1ccbfecdbccac6844f3d0d44b9c4a00a17f", size = 27479, upload-time = "2024-08-04T21:14:23.922Z" }, + { url = "https://files.pythonhosted.org/packages/a6/0d/2a79231ff54eb20e83b47e7610462ad6a2bea4e113fae5aa91c6547e7764/audioop_lts-0.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5b7b4ff9de7a44e0ad2618afdc2ac920b91f4a6d3509520ee65339d4acde5abf", size = 27056, upload-time = "2024-08-04T21:14:28.061Z" }, + { url = "https://files.pythonhosted.org/packages/86/46/342471398283bb0634f5a6df947806a423ba74b2e29e250c7ec0e3720e4f/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72e37f416adb43b0ced93419de0122b42753ee74e87070777b53c5d2241e7fab", size = 87802, upload-time = "2024-08-04T21:14:29.586Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/44/7a85b08d4ed55517634ff19ddfbd0af05bf8bfd39a204e4445cd0e6f0cc9/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534ce808e6bab6adb65548723c8cbe189a3379245db89b9d555c4210b4aaa9b6", size = 95016, upload-time = "2024-08-04T21:14:30.481Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2a/45edbca97ea9ee9e6bbbdb8d25613a36e16a4d1e14ae01557392f15cc8d3/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2de9b6fb8b1cf9f03990b299a9112bfdf8b86b6987003ca9e8a6c4f56d39543", size = 87394, upload-time = "2024-08-04T21:14:31.883Z" }, + { url = "https://files.pythonhosted.org/packages/14/ae/832bcbbef2c510629593bf46739374174606e25ac7d106b08d396b74c964/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f24865991b5ed4b038add5edbf424639d1358144f4e2a3e7a84bc6ba23e35074", size = 84874, upload-time = "2024-08-04T21:14:32.751Z" }, + { url = "https://files.pythonhosted.org/packages/26/1c/8023c3490798ed2f90dfe58ec3b26d7520a243ae9c0fc751ed3c9d8dbb69/audioop_lts-0.2.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bdb3b7912ccd57ea53197943f1bbc67262dcf29802c4a6df79ec1c715d45a78", size = 88698, upload-time = "2024-08-04T21:14:34.147Z" }, + { url = "https://files.pythonhosted.org/packages/2c/db/5379d953d4918278b1f04a5a64b2c112bd7aae8f81021009da0dcb77173c/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:120678b208cca1158f0a12d667af592e067f7a50df9adc4dc8f6ad8d065a93fb", size = 90401, upload-time = "2024-08-04T21:14:35.276Z" }, + { url = "https://files.pythonhosted.org/packages/99/6e/3c45d316705ab1aec2e69543a5b5e458d0d112a93d08994347fafef03d50/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:54cd4520fc830b23c7d223693ed3e1b4d464997dd3abc7c15dce9a1f9bd76ab2", size = 91864, upload-time = "2024-08-04T21:14:36.158Z" }, + { url = "https://files.pythonhosted.org/packages/08/58/6a371d8fed4f34debdb532c0b00942a84ebf3e7ad368e5edc26931d0e251/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:d6bd20c7a10abcb0fb3d8aaa7508c0bf3d40dfad7515c572014da4b979d3310a", size = 98796, upload-time = "2024-08-04T21:14:37.185Z" }, + { url = "https://files.pythonhosted.org/packages/ee/77/d637aa35497e0034ff846fd3330d1db26bc6fd9dd79c406e1341188b06a2/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:f0ed1ad9bd862539ea875fb339ecb18fcc4148f8d9908f4502df28f94d23491a", size = 94116, upload-time = "2024-08-04T21:14:38.145Z" }, + { url = "https://files.pythonhosted.org/packages/1a/60/7afc2abf46bbcf525a6ebc0305d85ab08dc2d1e2da72c48dbb35eee5b62c/audioop_lts-0.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e1af3ff32b8c38a7d900382646e91f2fc515fd19dea37e9392275a5cbfdbff63", size = 91520, upload-time = "2024-08-04T21:14:39.128Z" }, + { url = "https://files.pythonhosted.org/packages/65/6d/42d40da100be1afb661fd77c2b1c0dfab08af1540df57533621aea3db52a/audioop_lts-0.2.1-cp313-cp313t-win32.whl", hash = "sha256:f51bb55122a89f7a0817d7ac2319744b4640b5b446c4c3efcea5764ea99ae509", size = 26482, upload-time = "2024-08-04T21:14:40.269Z" }, + { url = "https://files.pythonhosted.org/packages/01/09/f08494dca79f65212f5b273aecc5a2f96691bf3307cac29acfcf84300c01/audioop_lts-0.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f0f2f336aa2aee2bce0b0dcc32bbba9178995454c7b979cf6ce086a8801e14c7", size = 30780, 
upload-time = "2024-08-04T21:14:41.128Z" }, + { url = "https://files.pythonhosted.org/packages/5d/35/be73b6015511aa0173ec595fc579133b797ad532996f2998fd6b8d1bbe6b/audioop_lts-0.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:78bfb3703388c780edf900be66e07de5a3d4105ca8e8720c5c4d67927e0b15d0", size = 23918, upload-time = "2024-08-04T21:14:42.803Z" }, +] + +[[package]] +name = "auth-module" +version = "0.0.1" +source = { editable = "modules/auth_module" } +dependencies = [ + { name = "common-module" }, + { name = "dependency-injector" }, + { name = "flo-cloud" }, + { name = "msgraph-sdk" }, + { name = "pyjwt", extra = ["crypto"] }, +] + +[package.dev-dependencies] +dev = [ + { name = "asyncpg" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, +] + +[package.metadata] +requires-dist = [ + { name = "common-module", editable = "modules/common_module" }, + { name = "dependency-injector", specifier = ">=4.42.0,<5.0.0" }, + { name = "flo-cloud", editable = "packages/flo_cloud" }, + { name = "msgraph-sdk", specifier = ">=1.5.4,<2.0.0" }, + { name = "pyjwt", extras = ["crypto"], specifier = ">=2.9.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "asyncpg", specifier = ">=0.30.0,<1.0.0" }, + { name = "pytest", specifier = ">=8.3.3,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.24.0,<1.0.0" }, +] + +[[package]] +name = "authenticator" +version = "0.1.0" +source = { editable = "plugins/authenticator" } +dependencies = [ + { name = "requests" }, +] + +[package.metadata] +requires-dist = [{ name = "requests", specifier = ">=2.25.0" }] + +[[package]] +name = "authlib" +version = "1.6.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ce/bb/73a1f1c64ee527877f64122422dafe5b87a846ccf4ac933fe21bcbb8fee8/authlib-1.6.4.tar.gz", hash = "sha256:104b0442a43061dc8bc23b133d1d06a2b0a9c2e3e33f34c4338929e816287649", size = 164046, upload-time = "2025-09-17T09:59:23.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/aa/91355b5f539caf1b94f0e66ff1e4ee39373b757fce08204981f7829ede51/authlib-1.6.4-py2.py3-none-any.whl", hash = "sha256:39313d2a2caac3ecf6d8f95fbebdfd30ae6ea6ae6a6db794d976405fdd9aa796", size = 243076, upload-time = "2025-09-17T09:59:22.259Z" }, +] + +[[package]] +name = "azure-core" +version = "1.35.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, + { name = "six" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/15/6b/2653adc0f33adba8f11b1903701e6b1c10d34ce5d8e25dfa13a422f832b0/azure_core-1.35.1.tar.gz", hash = "sha256:435d05d6df0fff2f73fb3c15493bb4721ede14203f1ff1382aa6b6b2bdd7e562", size = 345290, upload-time = "2025-09-11T22:58:04.481Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/52/805980aa1ba18282077c484dba634ef0ede1e84eec8be9c92b2e162d0ed6/azure_core-1.35.1-py3-none-any.whl", hash = "sha256:12da0c9e08e48e198f9158b56ddbe33b421477e1dc98c2e1c8f9e254d92c468b", size = 211800, upload-time = "2025-09-11T22:58:06.281Z" }, +] + +[[package]] +name = "azure-identity" +version = "1.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-core" }, + { name = "cryptography" }, + { name = "msal" }, + { name = "msal-extensions" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/4e/9e/4c9682a286c3c89e437579bd9f64f311020e5125c1321fd3a653166b5716/azure_identity-1.25.0.tar.gz", hash = "sha256:4177df34d684cddc026e6cf684e1abb57767aa9d84e7f2129b080ec45eee7733", size = 278507, upload-time = "2025-09-12T01:30:04.418Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/54/81683b6756676a22e037b209695b08008258e603f7e47c56834029c5922a/azure_identity-1.25.0-py3-none-any.whl", hash = "sha256:becaec086bbdf8d1a6aa4fb080c2772a0f824a97d50c29637ec8cc4933f1e82d", size = 190861, upload-time = "2025-09-12T01:30:06.474Z" }, +] + +[[package]] +name = "bcrypt" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/5d/6d7433e0f3cd46ce0b43cd65e1db465ea024dbb8216fb2404e919c2ad77b/bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18", size = 25697, upload-time = "2025-02-28T01:24:09.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/2c/3d44e853d1fe969d229bd58d39ae6902b3d924af0e2b5a60d17d4b809ded/bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281", size = 483719, upload-time = "2025-02-28T01:22:34.539Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e2/58ff6e2a22eca2e2cff5370ae56dba29d70b1ea6fc08ee9115c3ae367795/bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb", size = 272001, upload-time = "2025-02-28T01:22:38.078Z" }, + { url = "https://files.pythonhosted.org/packages/37/1f/c55ed8dbe994b1d088309e366749633c9eb90d139af3c0a50c102ba68a1a/bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180", size = 277451, upload-time = "2025-02-28T01:22:40.787Z" }, + { url = "https://files.pythonhosted.org/packages/d7/1c/794feb2ecf22fe73dcfb697ea7057f632061faceb7dcf0f155f3443b4d79/bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f", size = 272792, upload-time = "2025-02-28T01:22:43.144Z" }, + { url = "https://files.pythonhosted.org/packages/13/b7/0b289506a3f3598c2ae2bdfa0ea66969812ed200264e3f61df77753eee6d/bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09", size = 289752, upload-time = "2025-02-28T01:22:45.56Z" }, + { url = "https://files.pythonhosted.org/packages/dc/24/d0fb023788afe9e83cc118895a9f6c57e1044e7e1672f045e46733421fe6/bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d", size = 277762, upload-time = "2025-02-28T01:22:47.023Z" }, + { url = "https://files.pythonhosted.org/packages/e4/38/cde58089492e55ac4ef6c49fea7027600c84fd23f7520c62118c03b4625e/bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd", size = 272384, upload-time = "2025-02-28T01:22:49.221Z" }, + { url = "https://files.pythonhosted.org/packages/de/6a/d5026520843490cfc8135d03012a413e4532a400e471e6188b01b2de853f/bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af", size = 277329, upload-time = 
"2025-02-28T01:22:51.603Z" }, + { url = "https://files.pythonhosted.org/packages/b3/a3/4fc5255e60486466c389e28c12579d2829b28a527360e9430b4041df4cf9/bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231", size = 305241, upload-time = "2025-02-28T01:22:53.283Z" }, + { url = "https://files.pythonhosted.org/packages/c7/15/2b37bc07d6ce27cc94e5b10fd5058900eb8fb11642300e932c8c82e25c4a/bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c", size = 309617, upload-time = "2025-02-28T01:22:55.461Z" }, + { url = "https://files.pythonhosted.org/packages/5f/1f/99f65edb09e6c935232ba0430c8c13bb98cb3194b6d636e61d93fe60ac59/bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f", size = 335751, upload-time = "2025-02-28T01:22:57.81Z" }, + { url = "https://files.pythonhosted.org/packages/00/1b/b324030c706711c99769988fcb694b3cb23f247ad39a7823a78e361bdbb8/bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d", size = 355965, upload-time = "2025-02-28T01:22:59.181Z" }, + { url = "https://files.pythonhosted.org/packages/aa/dd/20372a0579dd915dfc3b1cd4943b3bca431866fcb1dfdfd7518c3caddea6/bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4", size = 155316, upload-time = "2025-02-28T01:23:00.763Z" }, + { url = "https://files.pythonhosted.org/packages/6d/52/45d969fcff6b5577c2bf17098dc36269b4c02197d551371c023130c0f890/bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669", size = 147752, upload-time = "2025-02-28T01:23:02.908Z" }, + { url = "https://files.pythonhosted.org/packages/11/22/5ada0b9af72b60cbc4c9a399fdde4af0feaa609d27eb0adc61607997a3fa/bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d", size = 498019, upload-time = "2025-02-28T01:23:05.838Z" }, + { url = "https://files.pythonhosted.org/packages/b8/8c/252a1edc598dc1ce57905be173328eda073083826955ee3c97c7ff5ba584/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b", size = 279174, upload-time = "2025-02-28T01:23:07.274Z" }, + { url = "https://files.pythonhosted.org/packages/29/5b/4547d5c49b85f0337c13929f2ccbe08b7283069eea3550a457914fc078aa/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e", size = 283870, upload-time = "2025-02-28T01:23:09.151Z" }, + { url = "https://files.pythonhosted.org/packages/be/21/7dbaf3fa1745cb63f776bb046e481fbababd7d344c5324eab47f5ca92dd2/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59", size = 279601, upload-time = "2025-02-28T01:23:11.461Z" }, + { url = "https://files.pythonhosted.org/packages/6d/64/e042fc8262e971347d9230d9abbe70d68b0a549acd8611c83cebd3eaec67/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753", size = 297660, upload-time = "2025-02-28T01:23:12.989Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/b8/6294eb84a3fef3b67c69b4470fcdd5326676806bf2519cda79331ab3c3a9/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761", size = 284083, upload-time = "2025-02-28T01:23:14.5Z" }, + { url = "https://files.pythonhosted.org/packages/62/e6/baff635a4f2c42e8788fe1b1633911c38551ecca9a749d1052d296329da6/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb", size = 279237, upload-time = "2025-02-28T01:23:16.686Z" }, + { url = "https://files.pythonhosted.org/packages/39/48/46f623f1b0c7dc2e5de0b8af5e6f5ac4cc26408ac33f3d424e5ad8da4a90/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d", size = 283737, upload-time = "2025-02-28T01:23:18.897Z" }, + { url = "https://files.pythonhosted.org/packages/49/8b/70671c3ce9c0fca4a6cc3cc6ccbaa7e948875a2e62cbd146e04a4011899c/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f", size = 312741, upload-time = "2025-02-28T01:23:21.041Z" }, + { url = "https://files.pythonhosted.org/packages/27/fb/910d3a1caa2d249b6040a5caf9f9866c52114d51523ac2fb47578a27faee/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732", size = 316472, upload-time = "2025-02-28T01:23:23.183Z" }, + { url = "https://files.pythonhosted.org/packages/dc/cf/7cf3a05b66ce466cfb575dbbda39718d45a609daa78500f57fa9f36fa3c0/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef", size = 343606, upload-time = "2025-02-28T01:23:25.361Z" }, + { url = "https://files.pythonhosted.org/packages/e3/b8/e970ecc6d7e355c0d892b7f733480f4aa8509f99b33e71550242cf0b7e63/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304", size = 362867, upload-time = "2025-02-28T01:23:26.875Z" }, + { url = "https://files.pythonhosted.org/packages/a9/97/8d3118efd8354c555a3422d544163f40d9f236be5b96c714086463f11699/bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51", size = 160589, upload-time = "2025-02-28T01:23:28.381Z" }, + { url = "https://files.pythonhosted.org/packages/29/07/416f0b99f7f3997c69815365babbc2e8754181a4b1899d921b3c7d5b6f12/bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62", size = 152794, upload-time = "2025-02-28T01:23:30.187Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c1/3fa0e9e4e0bfd3fd77eb8b52ec198fd6e1fd7e9402052e43f23483f956dd/bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3", size = 498969, upload-time = "2025-02-28T01:23:31.945Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d4/755ce19b6743394787fbd7dff6bf271b27ee9b5912a97242e3caf125885b/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24", size = 279158, upload-time = "2025-02-28T01:23:34.161Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/5d/805ef1a749c965c46b28285dfb5cd272a7ed9fa971f970435a5133250182/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef", size = 284285, upload-time = "2025-02-28T01:23:35.765Z" }, + { url = "https://files.pythonhosted.org/packages/ab/2b/698580547a4a4988e415721b71eb45e80c879f0fb04a62da131f45987b96/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b", size = 279583, upload-time = "2025-02-28T01:23:38.021Z" }, + { url = "https://files.pythonhosted.org/packages/f2/87/62e1e426418204db520f955ffd06f1efd389feca893dad7095bf35612eec/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676", size = 297896, upload-time = "2025-02-28T01:23:39.575Z" }, + { url = "https://files.pythonhosted.org/packages/cb/c6/8fedca4c2ada1b6e889c52d2943b2f968d3427e5d65f595620ec4c06fa2f/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1", size = 284492, upload-time = "2025-02-28T01:23:40.901Z" }, + { url = "https://files.pythonhosted.org/packages/4d/4d/c43332dcaaddb7710a8ff5269fcccba97ed3c85987ddaa808db084267b9a/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe", size = 279213, upload-time = "2025-02-28T01:23:42.653Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/1e36379e169a7df3a14a1c160a49b7b918600a6008de43ff20d479e6f4b5/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0", size = 284162, upload-time = "2025-02-28T01:23:43.964Z" }, + { url = "https://files.pythonhosted.org/packages/1c/0a/644b2731194b0d7646f3210dc4d80c7fee3ecb3a1f791a6e0ae6bb8684e3/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f", size = 312856, upload-time = "2025-02-28T01:23:46.011Z" }, + { url = "https://files.pythonhosted.org/packages/dc/62/2a871837c0bb6ab0c9a88bf54de0fc021a6a08832d4ea313ed92a669d437/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23", size = 316726, upload-time = "2025-02-28T01:23:47.575Z" }, + { url = "https://files.pythonhosted.org/packages/0c/a1/9898ea3faac0b156d457fd73a3cb9c2855c6fd063e44b8522925cdd8ce46/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe", size = 343664, upload-time = "2025-02-28T01:23:49.059Z" }, + { url = "https://files.pythonhosted.org/packages/40/f2/71b4ed65ce38982ecdda0ff20c3ad1b15e71949c78b2c053df53629ce940/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505", size = 363128, upload-time = "2025-02-28T01:23:50.399Z" }, + { url = "https://files.pythonhosted.org/packages/11/99/12f6a58eca6dea4be992d6c681b7ec9410a1d9f5cf368c61437e31daa879/bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a", size = 160598, upload-time = "2025-02-28T01:23:51.775Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/cf/45fb5261ece3e6b9817d3d82b2f343a505fd58674a92577923bc500bd1aa/bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b", size = 152799, upload-time = "2025-02-28T01:23:53.139Z" }, + { url = "https://files.pythonhosted.org/packages/4c/b1/1289e21d710496b88340369137cc4c5f6ee036401190ea116a7b4ae6d32a/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a", size = 275103, upload-time = "2025-02-28T01:24:00.764Z" }, + { url = "https://files.pythonhosted.org/packages/94/41/19be9fe17e4ffc5d10b7b67f10e459fc4eee6ffe9056a88de511920cfd8d/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce", size = 280513, upload-time = "2025-02-28T01:24:02.243Z" }, + { url = "https://files.pythonhosted.org/packages/aa/73/05687a9ef89edebdd8ad7474c16d8af685eb4591c3c38300bb6aad4f0076/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8", size = 274685, upload-time = "2025-02-28T01:24:04.512Z" }, + { url = "https://files.pythonhosted.org/packages/63/13/47bba97924ebe86a62ef83dc75b7c8a881d53c535f83e2c54c4bd701e05c/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938", size = 280110, upload-time = "2025-02-28T01:24:05.896Z" }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.8.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/ba/0e121661f529e7f456e903bf5c4d255b8051d8ce2b5e629c5212efe4c3f1/beautifulsoup4-4.8.2.tar.gz", hash = "sha256:05fd825eb01c290877657a56df4c6e4c311b3965bda790c613a3d6fb01a5462a", size = 298650, upload-time = "2019-12-24T22:28:22.187Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/a1/c698cf319e9cfed6b17376281bd0efc6bfc8465698f54170ef60a485ab5d/beautifulsoup4-4.8.2-py3-none-any.whl", hash = "sha256:9fbb4d6e48ecd30bcacc5b63b94088192dcda178513b2ae3c394229f8911b887", size = 106874, upload-time = "2019-12-24T22:28:20.142Z" }, +] + +[[package]] +name = "boto3" +version = "1.38.27" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e7/96/fc74d8521d2369dd8c412438401ff12e1350a1cd3eab5c758ed3dd5e5f82/boto3-1.38.27.tar.gz", hash = "sha256:94bd7fdd92d5701b362d4df100d21e28f8307a67ff56b6a8b0398119cf22f859", size = 111875, upload-time = "2025-05-30T19:32:41.352Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/8b/b2361188bd1e293eede1bc165e2461d390394f71ec0c8c21211c8dabf62c/boto3-1.38.27-py3-none-any.whl", hash = "sha256:95f5fe688795303a8a15e8b7e7f255cadab35eae459d00cc281a4fd77252ea80", size = 139938, upload-time = "2025-05-30T19:32:38.006Z" }, +] + +[[package]] +name = "botocore" +version = "1.38.27" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/36/5e/67899214ad57f7f26af5bd776ac5eb583dc4ecf5c1e52e2cbfdc200e487a/botocore-1.38.27.tar.gz", hash = 
"sha256:9788f7efe974328a38cbade64cc0b1e67d27944b899f88cb786ae362973133b6", size = 13919963, upload-time = "2025-05-30T19:32:29.657Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/83/a753562020b69fa90cebc39e8af2c753b24dcdc74bee8355ee3f6cefdf34/botocore-1.38.27-py3-none-any.whl", hash = "sha256:a785d5e9a5eda88ad6ab9ed8b87d1f2ac409d0226bba6ff801c55359e94d91a8", size = 13580545, upload-time = "2025-05-30T19:32:26.712Z" }, +] + +[[package]] +name = "cachetools" +version = "6.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/61/e4fad8155db4a04bfb4734c7c8ff0882f078f24294d42798b3568eb63bff/cachetools-6.2.0.tar.gz", hash = "sha256:38b328c0889450f05f5e120f56ab68c8abaf424e1275522b138ffc93253f7e32", size = 30988, upload-time = "2025-08-25T18:57:30.924Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/56/3124f61d37a7a4e7cc96afc5492c78ba0cb551151e530b54669ddd1436ef/cachetools-6.2.0-py3-none-any.whl", hash = "sha256:1c76a8960c0041fcc21097e357f882197c79da0dbff766e7317890a65d7d8ba6", size = 11276, upload-time = "2025-08-25T18:57:29.684Z" }, +] + +[[package]] +name = "call-processing" +version = "0.1.0" +source = { editable = "apps/call_processing" } +dependencies = [ + { name = "dependency-injector" }, + { name = "fastapi" }, + { name = "httpx" }, + { name = "pipecat-ai", extra = ["cartesia", "deepgram", "google", "groq", "runner", "silero", "websocket"] }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "python-multipart" }, + { name = "redis" }, + { name = "tenacity" }, + { name = "twilio" }, + { name = "uvicorn" }, +] + +[package.metadata] +requires-dist = [ + { name = "dependency-injector", specifier = ">=4.46.0,<5.0.0" }, + { name = "fastapi", specifier = ">=0.115.2,<1.0.0" }, + { name = "httpx", specifier = ">=0.27.0" }, + { name = "pipecat-ai", extras = ["websocket", "cartesia", "google", "silero", "deepgram", "groq", "runner"], specifier = "==0.0.91" }, + { name = "pydantic", specifier = ">=2.0.0" }, + { name = "python-dotenv", specifier = ">=1.1.0,<2.0.0" }, + { name = "python-multipart", specifier = ">=0.0.9" }, + { name = "redis", specifier = ">=5.0.0" }, + { name = "tenacity", specifier = ">=8.0.0" }, + { name = "twilio", specifier = ">=8.0.0" }, + { name = "uvicorn", specifier = ">=0.30.5,<1.0.0" }, +] + +[[package]] +name = "cartesia" +version = "2.0.17" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "audioop-lts", marker = "python_full_version >= '3.13' and python_full_version < '4.0'" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "iterators" }, + { name = "pydantic" }, + { name = "pydantic-core" }, + { name = "pydub" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fa/ff/bfd3191a7fdbbb5c4dfe4d34461c6aa0d158a6eea599cb9a5df2c91109fa/cartesia-2.0.17.tar.gz", hash = "sha256:fd7fcdcbb5aac47ff6b35cd48420b4993ef1742aaa71bb7d52b335314045d584", size = 79227, upload-time = "2025-11-13T21:06:45.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/9c/f7b83329e0567d0ab165abd81405108d146abc9728732c1af3858ee38bfd/cartesia-2.0.17-py3-none-any.whl", hash = "sha256:de8975ced1c5c09f1b51bb87ceea6c1641ba817901cfc73c47fc4e37c6ca351a", size = 153376, upload-time = "2025-11-13T21:06:42.872Z" }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size 
= 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, +] + +[[package]] +name = "chardet" +version = "3.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", size = 1868453, upload-time = "2017-06-08T14:34:35.581Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/a9/01ffebfb562e4274b6487b4bb1ddec7ca55ec7510b22e4c51f14098443b8/chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691", size = 133356, upload-time = "2017-06-08T14:34:33.552Z" 
}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, + { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, + { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, + { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, + { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, + { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, + { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, + { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, + { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, + { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, + { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, + { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, + { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, + { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, + { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, + { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, + { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, + { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, + { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, + { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, + { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, + { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, + { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, + { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, + { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, +] + +[[package]] +name = "click" +version = "8.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coloredlogs" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "humanfriendly" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520, upload-time = "2021-06-11T10:22:45.202Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018, upload-time = "2021-06-11T10:22:42.561Z" }, +] + +[[package]] +name = "common-module" +version = "0.1.0" +source = { editable = "modules/common_module" } +dependencies = [ + { name = "apscheduler" }, + { name = "dependency-injector" }, + { name = "fastapi" }, + { name = "loguru" }, + { name = "prometheus-client" }, + { name = "redis" }, +] + +[package.dev-dependencies] +dev = [ + { name = "asyncpg" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "testing-postgresql" }, +] + +[package.metadata] +requires-dist = [ + { name = "apscheduler", specifier = ">=3.11.0,<4.0.0" }, + { name = "dependency-injector", specifier = ">=4.42.0,<5.0.0" }, + { name = "fastapi", specifier = ">=0.115.2,<1.0.0" }, + { name = "loguru", specifier = ">=0.7.2,<1.0.0" }, + { name = "prometheus-client", specifier = ">=0.22.1,<1.0.0" }, + { name = "redis", specifier = ">=5.2.1,<6.0.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "asyncpg", specifier = ">=0.30.0,<1.0.0" }, + { name = "pytest", specifier = ">=8.3.4,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.24.0,<1.0.0" }, + { name = "testing-postgresql", specifier = ">=1.3.0,<2.0.0" }, +] + +[[package]] +name = "compressed-rtf" +version = "1.0.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/0c/929a4e8ef9d7143f54d77dadb5f370cc7b98534b1bd6e1124d0abe8efb24/compressed_rtf-1.0.7.tar.gz", hash = "sha256:7c30859334839f3cdc7d10796af5b434bb326b9df7cb5a65e95a8eacb2951b0e", size = 8152, upload-time = "2025-03-24T22:39:32.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/1d/62f5bf92e12335eb63517f42671ed78512d48bbc69e02a942dd7b90f03f0/compressed_rtf-1.0.7-py3-none-any.whl", hash = "sha256:b7904921d78c67a0a4b7fff9fb361a00ae2b447b6edca010ce321cd98fa0fcc0", size = 7968, upload-time = "2025-03-24T23:03:57.433Z" }, +] + +[[package]] +name = "cryptography" +version = "46.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 
4296667, upload-time = "2025-10-15T23:16:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, + { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, + { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, 
upload-time = "2025-10-15T23:17:14.65Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, + { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, + { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, + { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, + { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, + { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, + { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, + { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", 
size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, + { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, + { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, + { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, + { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 
4012280, upload-time = "2025-10-15T23:17:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, + { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, + { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, + { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, + { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, +] + +[[package]] +name = "dacite" +version = "1.9.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/55/a0/7ca79796e799a3e782045d29bf052b5cde7439a2bbb17f15ff44f7aacc63/dacite-1.9.2.tar.gz", hash = "sha256:6ccc3b299727c7aa17582f0021f6ae14d5de47c7227932c47fec4cdfefd26f09", size = 22420, upload-time = "2025-02-05T09:27:29.757Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/35/386550fd60316d1e37eccdda609b074113298f23cef5bddb2049823fe666/dacite-1.9.2-py3-none-any.whl", hash = "sha256:053f7c3f5128ca2e9aceb66892b1a3c8936d02c686e707bee96e19deef4bc4a0", size = 16600, upload-time = "2025-02-05T09:27:24.345Z" }, +] + +[[package]] +name = "dataclasses-json" +version = "0.6.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "marshmallow" }, + { name = "typing-inspect" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/a4/f71d9cf3a5ac257c993b5ca3f93df5f7fb395c725e7f1e6479d2514173c3/dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0", size = 32227, upload-time = "2024-06-09T16:20:19.103Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/be/d0d44e092656fe7a06b55e6103cbce807cdbdee17884a5367c68c9860853/dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", size = 28686, upload-time = "2024-06-09T16:20:16.715Z" }, +] + +[[package]] +name = "datasource" +version = "0.1.0" +source = { editable = "plugins/datasource" } +dependencies = [ + { name = "flo-cloud" }, +] + +[package.metadata] +requires-dist = [{ name = "flo-cloud", editable = "packages/flo_cloud" }] + +[[package]] +name = "db-repo-module" +version = "0.1.0" +source = { editable = "modules/db_repo_module" } +dependencies = [ + { name = "alembic" }, + { name = "common-module" }, + { name = 
"dependency-injector" }, + { name = "pgvector" }, + { name = "psycopg", extra = ["binary", "pool"] }, + { name = "redis" }, + { name = "sqlalchemy" }, + { name = "tenacity" }, +] + +[package.metadata] +requires-dist = [ + { name = "alembic", specifier = ">=1.14.1,<2.0.0" }, + { name = "common-module", editable = "modules/common_module" }, + { name = "dependency-injector", specifier = ">=4.42.0,<5.0.0" }, + { name = "pgvector", specifier = ">=0.4.1" }, + { name = "psycopg", extras = ["binary", "pool"], specifier = ">=3.2.3,<4.0.0" }, + { name = "redis", specifier = ">=5.2.1,<6.0.0" }, + { name = "sqlalchemy", specifier = ">=2.0.36,<3.0.0" }, + { name = "tenacity", specifier = ">=8.1.0,<9.0.0" }, +] + +[[package]] +name = "deepgram-sdk" +version = "4.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aenum" }, + { name = "aiofiles" }, + { name = "aiohttp" }, + { name = "dataclasses-json" }, + { name = "deprecation" }, + { name = "httpx" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/17/c7/3c5918c2c74e3d56cf3d738aa174bc688c73069dc9682fc1bfaeb2058cc6/deepgram_sdk-4.7.0.tar.gz", hash = "sha256:e371396d8835d449782df472c3bd501f6cad41b3c925f66771933ff3fc4b1a13", size = 100128, upload-time = "2025-07-21T15:43:56.705Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/63/43a6e46b35eae9739e22b5cace4a22ece76d4aff74b563563b9507411484/deepgram_sdk-4.7.0-py3-none-any.whl", hash = "sha256:1a2a0890aa43cbc510e07b0f911f6841770ca0222e6fcc069bd3e2afcde1c061", size = 157911, upload-time = "2025-07-21T15:43:55.695Z" }, +] + +[[package]] +name = "dependency-injector" +version = "4.48.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/a4/619df82de38ce6451cc1acb549237cbd9306c4bfbcec6e8e1fdbceb8c5f3/dependency_injector-4.48.2.tar.gz", hash = "sha256:9ce6089d75a5dd0b6191a243f41d2c2746802bb39550ad431242c15136fefd60", size = 1103335, upload-time = "2025-09-19T10:19:43.492Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/56/dce91cc7638a4be4d83e18d20edd3f9b295440b1897d972f7a8ce3ea240f/dependency_injector-4.48.2-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:54d0178be10f17b768afb5c0ed1c5c565abaa2d097b2bc5a529a31c580613df2", size = 1755919, upload-time = "2025-09-19T10:18:53.97Z" }, + { url = "https://files.pythonhosted.org/packages/67/80/c29f5cb5fd794ea453b240e6d6682a07cdc519a4bd76589c4b75a1bb7a91/dependency_injector-4.48.2-cp310-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:12a15979fd534b728b3061c8aa52fd55adb77574758817daae9df8a1c2eb830b", size = 1855277, upload-time = "2025-09-19T10:18:55.911Z" }, + { url = "https://files.pythonhosted.org/packages/79/fa/20f14684dfb822f4b72623d4c1250149ba2fcc95a831ae334605eff31b33/dependency_injector-4.48.2-cp310-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:85cdf4b423884d4a24a18b970abe73352fb210761302cd6b5ebc6e9a20dbe53f", size = 1760596, upload-time = "2025-09-19T10:18:57.636Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c0/06dddb1b21f64bd1e948244aed1743c8013ea7800fc6e3e470b0019dd93e/dependency_injector-4.48.2-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a9b457b95a400b7a2de0978a55768cdd104bd265953bf0ed06e7f25d18f35ed2", size = 1742442, upload-time = "2025-09-19T10:18:59.041Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/23/32575e230f5baf8082eb776847756024441207eabbb03ada679c29061070/dependency_injector-4.48.2-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:417809f565c39800adb744d666dfe4d94eae510b73ec33f932d592415d7c46d0", size = 1842421, upload-time = "2025-09-19T10:19:01.446Z" }, + { url = "https://files.pythonhosted.org/packages/3c/99/6595e8235d8e120129098d7a56b0491be313bab5415fc9d107ad1ae2a967/dependency_injector-4.48.2-cp310-abi3-win32.whl", hash = "sha256:f014aa7bab427932802d59967d9fe0863a0001db66446177dcc62e47f3a6b234", size = 1512262, upload-time = "2025-09-19T10:19:03.029Z" }, + { url = "https://files.pythonhosted.org/packages/96/04/cf1d482d163bf8c7cfd886cb4cf8eed950b366c2723dea2b21874ef2201c/dependency_injector-4.48.2-cp310-abi3-win_amd64.whl", hash = "sha256:e3fcdeb8189f3e1f87fde9276061f8a6cc596c2fa139bc4b4d1f571035ebd645", size = 1640200, upload-time = "2025-09-19T10:19:04.549Z" }, + { url = "https://files.pythonhosted.org/packages/fd/1e/2a27d65b34419818a89024413b428f1effd6125e185c4902486796281ff2/dependency_injector-4.48.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:174b117a7a5a28d45c004a3242b28184db12f9818ff4ea8d0cebc230bb5f7b65", size = 1736171, upload-time = "2025-09-19T10:19:35.431Z" }, + { url = "https://files.pythonhosted.org/packages/7b/9c/4f55dc60c6dc3f59defd461213b58d90d977c0928e20a98d3d870413db86/dependency_injector-4.48.2-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8eed93fa25368be43bc3486bddedcc5ad0a62e0a9a441d060eb80ae7980416b0", size = 1828380, upload-time = "2025-09-19T10:19:37.36Z" }, + { url = "https://files.pythonhosted.org/packages/21/e6/16a09fdf0eb368e1c2395979ec9a4bdf7829a57ae3524be491bd5e7f05b4/dependency_injector-4.48.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:85368f039e7fbef1d5f38f8ba42eca28b950288dc72aca13534ed4b3e96ee8cf", size = 1736520, upload-time = "2025-09-19T10:19:38.986Z" }, + { url = "https://files.pythonhosted.org/packages/7c/2f/41598584075fef9e2bc33c102ba2e0b91ffb207d914b19402d3abf566de8/dependency_injector-4.48.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:05ff29398e23a08e840c9a89a0b516d988b337a38534d33791857bd1defd2d23", size = 1623553, upload-time = "2025-09-19T10:19:40.947Z" }, +] + +[[package]] +name = "deprecated" +version = "1.2.18" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/97/06afe62762c9a8a86af0cfb7bfdab22a43ad17138b07af5b1a58442690a2/deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d", size = 2928744, upload-time = "2025-01-27T10:46:25.7Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/c6/ac0b6c1e2d138f1002bcf799d330bd6d85084fece321e662a14223794041/Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec", size = 9998, upload-time = "2025-01-27T10:46:09.186Z" }, +] + +[[package]] +name = "deprecation" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/d3/8ae2869247df154b64c1884d7346d412fed0c49df84db635aab2d1c40e62/deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff", size = 173788, upload-time = "2020-04-20T14:23:38.738Z" } +wheels 
= [ + { url = "https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a", size = 11178, upload-time = "2020-04-20T14:23:36.581Z" }, +] + +[[package]] +name = "distlib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, +] + +[[package]] +name = "dnspython" +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, +] + +[[package]] +name = "docstring-parser" +version = "0.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, +] + +[[package]] +name = "docx2txt" +version = "0.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/07/4486a038624e885e227fe79111914c01f55aa70a51920ff1a7f2bd216d10/docx2txt-0.9.tar.gz", hash = "sha256:18013f6229b14909028b19aa7bf4f8f3d6e4632d7b089ab29f7f0a4d1f660e28", size = 3613, upload-time = "2025-03-24T20:59:25.21Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d6/51/756e71bec48ece0ecc2a10e921ef2756e197dcb7e478f2b43673b6683902/docx2txt-0.9-py3-none-any.whl", hash = "sha256:e3718c0653fd6f2fcf4b51b02a61452ad1c38a4c163bcf0a6fd9486cd38f529a", size = 4025, upload-time = "2025-03-24T20:59:24.394Z" }, +] + +[[package]] +name = "ebcdic" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/2f/633031205333bee5f9f93761af8268746aa75f38754823aabb8570eb245b/ebcdic-1.1.1-py2.py3-none-any.whl", hash = "sha256:33b4cb729bc2d0bf46cc1847b0e5946897cb8d3f53520c5b9aa5fa98d7e735f1", size = 128537, upload-time = "2019-08-09T00:54:35.544Z" }, +] + +[[package]] +name = "ecdsa" +version = "0.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793, upload-time = "2025-03-13T11:52:43.25Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607, upload-time = "2025-03-13T11:52:41.757Z" }, +] + +[[package]] +name = "email-validator" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238, upload-time = "2025-08-26T13:09:06.831Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" }, +] + +[[package]] +name = "extract-msg" +version = "0.29.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "compressed-rtf" }, + { name = "ebcdic" }, + { name = "imapclient" }, + { name = "olefile" }, + { name = "tzlocal" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/70/60c17682d8b95077c526fe8374061f309fa647836f7783ee14b1277a9d9b/extract_msg-0.29.0.tar.gz", hash = "sha256:ae6ce5f78fddb582350cb49bbf2776eadecdbf3c74b7a305dced42bd187a5401", size = 72891, upload-time = "2022-01-14T06:12:57.822Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/dc/511f62860fc076fc4e27bfbb1bc6b1f2b61e694d68007853d983d1877bdf/extract_msg-0.29.0-py2.py3-none-any.whl", hash = "sha256:a8885dc385d0c88c4b87fb2a573727c0115cd2ef5157956cf183878f940eef28", size = 72912, upload-time = "2022-01-14T06:12:56.361Z" }, +] + +[[package]] +name = "fastapi" +version = "0.115.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/93/72/d83b98cd106541e8f5e5bfab8ef2974ab45a62e8a6c5b5e6940f26d2ed4b/fastapi-0.115.6.tar.gz", hash = "sha256:9ec46f7addc14ea472958a96aae5b5de65f39721a46aaf5705c480d9a8b76654", size = 301336, 
upload-time = "2024-12-03T22:46:01.629Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/b3/7e4df40e585df024fac2f80d1a2d579c854ac37109675db2b0cc22c0bb9e/fastapi-0.115.6-py3-none-any.whl", hash = "sha256:e9240b29e36fa8f4bb7290316988e90c381e5092e0cbe84e7818cc3713bcf305", size = 94843, upload-time = "2024-12-03T22:45:59.368Z" }, +] + +[package.optional-dependencies] +all = [ + { name = "email-validator" }, + { name = "fastapi-cli", extra = ["standard"] }, + { name = "httpx" }, + { name = "itsdangerous" }, + { name = "jinja2" }, + { name = "orjson" }, + { name = "pydantic-extra-types" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "pyyaml" }, + { name = "ujson" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[[package]] +name = "fastapi-cli" +version = "0.0.16" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "rich-toolkit" }, + { name = "typer" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/75/9407a6b452be4c988feacec9c9d2f58d8f315162a6c7258d5a649d933ebe/fastapi_cli-0.0.16.tar.gz", hash = "sha256:e8a2a1ecf7a4e062e3b2eec63ae34387d1e142d4849181d936b23c4bdfe29073", size = 19447, upload-time = "2025-11-10T19:01:07.856Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/43/678528c19318394320ee43757648d5e0a8070cf391b31f69d931e5c840d2/fastapi_cli-0.0.16-py3-none-any.whl", hash = "sha256:addcb6d130b5b9c91adbbf3f2947fe115991495fdb442fe3e51b5fc6327df9f4", size = 12312, upload-time = "2025-11-10T19:01:06.728Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "fastapi-cloud-cli" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[[package]] +name = "fastapi-cloud-cli" +version = "0.5.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fastar" }, + { name = "httpx" }, + { name = "pydantic", extra = ["email"] }, + { name = "rich-toolkit" }, + { name = "rignore" }, + { name = "sentry-sdk" }, + { name = "typer" }, + { name = "uvicorn", extra = ["standard"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/27/6c/32527a1fd7eee91dd0c52bbe6b7c21e3272b24beb436b142b2c9c01f922d/fastapi_cloud_cli-0.5.2.tar.gz", hash = "sha256:34d04ffadb2562c3ebb39e6f5a599c47353750f75a2ef0a2d9c1442ed09f3308", size = 30744, upload-time = "2025-11-25T11:06:14.583Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/29/5b46612e48589df93cbd9a77fe35a36315937386f7ecc0f1d7b9b10ad0ca/fastapi_cloud_cli-0.5.2-py3-none-any.whl", hash = "sha256:783ab5e41baf8afcbbfb7e513ac5bdf7376202053b8bf2d158a38978dba1ca69", size = 23218, upload-time = "2025-11-25T11:06:13.215Z" }, +] + +[[package]] +name = "fastar" +version = "0.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/e7/f89d54fb04104114dd0552836dc2b47914f416cc0e200b409dd04a33de5e/fastar-0.8.0.tar.gz", hash = "sha256:f4d4d68dbf1c4c2808f0e730fac5843493fc849f70fe3ad3af60dfbaf68b9a12", size = 68524, upload-time = "2025-11-26T02:36:00.72Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/15/1c764530b81b266f6d27d78d49b6bef22a73b3300cd83a280bfd244908c5/fastar-0.8.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cd9c0d3ebf7a0a6f642f771cf41b79f7c98d40a3072a8abe1174fbd9bd615bd3", size = 708427, upload-time = "2025-11-26T02:34:36.502Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/fc/75d42c008516543219e4293e4d8ac55da57a5c63147484f10468bd1bc24e/fastar-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2875a077340fe4f8099bd3ed8fa90d9595e1ac3cd62ae19ab690d5bf550eeb35", size = 631740, upload-time = "2025-11-26T02:34:20.718Z" }, + { url = "https://files.pythonhosted.org/packages/50/8d/9632984f7824ed2210157dcebd8e9821ef6d4f2b28510d0516db6625ff9b/fastar-0.8.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a999263d9f87184bf2801833b2ecf105e03c0dd91cac78685673b70da564fd64", size = 871628, upload-time = "2025-11-26T02:33:49.279Z" }, + { url = "https://files.pythonhosted.org/packages/05/97/3eb6ea71b7544d45cd29cacb764ca23cde8ce0aed1a6a02251caa4c0a818/fastar-0.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c41111da56430f638cbfc498ebdcc7d30f63416e904b27b7695c29bd4889cb8", size = 765005, upload-time = "2025-11-26T02:32:45.833Z" }, + { url = "https://files.pythonhosted.org/packages/d6/45/3eb0ee945a0b5d5f9df7e7c25c037ce7fa441cd0b4d44f76d286e2f4396a/fastar-0.8.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3719541a12bb09ab1eae91d2c987a9b2b7d7149c52e7109ba6e15b74aabc49b1", size = 765587, upload-time = "2025-11-26T02:33:01.174Z" }, + { url = "https://files.pythonhosted.org/packages/51/bb/7defd6ec0d9570b1987d8ebde52d07d97f3f26e10b592fb3e12738eba39a/fastar-0.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a9b0fff8079b18acdface7ef1b7f522fd9a589f65ca4a1a0dd7c92a0886c2a2", size = 931150, upload-time = "2025-11-26T02:33:17.374Z" }, + { url = "https://files.pythonhosted.org/packages/28/54/62e51e684dab347c61878afbf09e177029c1a91eb1e39ef244e6b3ef9efa/fastar-0.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac073576c1931959191cb20df38bab21dd152f66c940aa3ca8b22e39f753b2f3", size = 821354, upload-time = "2025-11-26T02:33:32.083Z" }, + { url = "https://files.pythonhosted.org/packages/53/a8/12708ea4d21e3cf9f485b2a67d44ce84d949a6eddcc9aa5b3d324585ab43/fastar-0.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:003b59a7c3e405b6a7bff8fab17d31e0ccbc7f06730a8f8ca1694eeea75f3c76", size = 821626, upload-time = "2025-11-26T02:34:05.685Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c4/1b4d3347c7a759853f963410bf6baf42fe014d587c50c39c8e145f4bf1a0/fastar-0.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a7b96748425efd9fc155cd920d65088a1b0d754421962418ea73413d02ff515a", size = 986187, upload-time = "2025-11-26T02:34:52.047Z" }, + { url = "https://files.pythonhosted.org/packages/dc/59/2dbe0dc2570764475e60030403738faa261a9d3bff16b08629c378ab939a/fastar-0.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:90957a30e64418b02df5b4d525bea50403d98a4b1f29143ce5914ddfa7e54ee4", size = 1041536, upload-time = "2025-11-26T02:35:08.926Z" }, + { url = "https://files.pythonhosted.org/packages/d9/0f/639b295669c7ca6fbc2b4be2a7832aaeac1a5e06923f15a8a6d6daecbc7d/fastar-0.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f6e784a8015623fbb7ccca1af372fd82cb511b408ddd2348dc929fc6e415df73", size = 1047149, upload-time = "2025-11-26T02:35:26.597Z" }, + { url = "https://files.pythonhosted.org/packages/cb/e7/23e3a19e06d261d1894f98eca9458f98c090c505a0c712dafc0ff1fc2965/fastar-0.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a03eaf287bbc93064688a1220580ce261e7557c8898f687f4d0b281c85b28d3c", size = 994992, upload-time = "2025-11-26T02:35:44.009Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/7a/3ea4726bae3ac9358d02107ae48f3e10ee186dbed554af79e00b7b498c44/fastar-0.8.0-cp311-cp311-win32.whl", hash = "sha256:661a47ed90762f419406c47e802f46af63a08254ba96abd1c8191e4ce967b665", size = 456449, upload-time = "2025-11-26T02:36:25.291Z" }, + { url = "https://files.pythonhosted.org/packages/cb/3c/0142bee993c431ee91cf5535e6e4b079ad491f620c215fcd79b7e5ffeb2b/fastar-0.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:b48abd6056fef7bc3d414aafb453c5b07fdf06d2df5a2841d650288a3aa1e9d3", size = 490863, upload-time = "2025-11-26T02:36:11.114Z" }, + { url = "https://files.pythonhosted.org/packages/3b/18/d119944f6bdbf6e722e204e36db86390ea45684a1bf6be6e3aa42abd471f/fastar-0.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:50c18788b3c6ffb85e176dcb8548bb8e54616a0519dcdbbfba66f6bbc4316933", size = 462230, upload-time = "2025-11-26T02:36:01.917Z" }, + { url = "https://files.pythonhosted.org/packages/58/f1/5b2ff898abac7f1a418284aad285e3a4f68d189c572ab2db0f6c9079dd16/fastar-0.8.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f10d2adfe40f47ff228f4efaa32d409d732ded98580e03ed37c9535b5fc923d", size = 706369, upload-time = "2025-11-26T02:34:37.783Z" }, + { url = "https://files.pythonhosted.org/packages/23/60/8046a386dca39154f80c927cbbeeb4b1c1267a3271bffe61552eb9995757/fastar-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b930da9d598e3bc69513d131f397e6d6be4643926ef3de5d33d1e826631eb036", size = 629097, upload-time = "2025-11-26T02:34:21.888Z" }, + { url = "https://files.pythonhosted.org/packages/22/7e/1ae005addc789924a9268da2394d3bb5c6f96836f7e37b7e3d23c2362675/fastar-0.8.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9d210da2de733ca801de83e931012349d209f38b92d9630ccaa94bd445bdc9b8", size = 868938, upload-time = "2025-11-26T02:33:51.119Z" }, + { url = "https://files.pythonhosted.org/packages/a6/77/290a892b073b84bf82e6b2259708dfe79c54f356e252c2dd40180b16fe07/fastar-0.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa02270721517078a5bd61a38719070ac2537a4aa6b6c48cf369cf2abc59174a", size = 765204, upload-time = "2025-11-26T02:32:47.02Z" }, + { url = "https://files.pythonhosted.org/packages/d0/00/c3155171b976003af3281f5258189f1935b15d1221bfc7467b478c631216/fastar-0.8.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:83c391e5b789a720e4d0029b9559f5d6dee3226693c5b39c0eab8eaece997e0f", size = 764717, upload-time = "2025-11-26T02:33:02.453Z" }, + { url = "https://files.pythonhosted.org/packages/b7/43/405b7ad76207b2c11b7b59335b70eac19e4a2653977f5588a1ac8fed54f4/fastar-0.8.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3258d7a78a72793cdd081545da61cabe85b1f37634a1d0b97ffee0ff11d105ef", size = 931502, upload-time = "2025-11-26T02:33:18.619Z" }, + { url = "https://files.pythonhosted.org/packages/da/8a/a3dde6d37cc3da4453f2845cdf16675b5686b73b164f37e2cc579b057c2c/fastar-0.8.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6eab95dd985cdb6a50666cbeb9e4814676e59cfe52039c880b69d67cfd44767", size = 821454, upload-time = "2025-11-26T02:33:33.427Z" }, + { url = "https://files.pythonhosted.org/packages/da/c1/904fe2468609c8990dce9fe654df3fbc7324a8d8e80d8240ae2c89757064/fastar-0.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:829b1854166141860887273c116c94e31357213fa8e9fe8baeb18bd6c38aa8d9", size = 821647, upload-time = "2025-11-26T02:34:07Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/73/a0642ab7a400bc07528091785e868ace598fde06fcd139b8f865ec1b6f3c/fastar-0.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b1667eae13f9457a3c737f4376d68e8c3e548353538b28f7e4273a30cb3965cd", size = 986342, upload-time = "2025-11-26T02:34:53.371Z" }, + { url = "https://files.pythonhosted.org/packages/af/af/60c1bfa6edab72366461a95f053d0f5f7ab1825fe65ca2ca367432cd8629/fastar-0.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b864a95229a7db0814cd9ef7987cb713fd43dce1b0d809dd17d9cd6f02fdde3e", size = 1040207, upload-time = "2025-11-26T02:35:10.65Z" }, + { url = "https://files.pythonhosted.org/packages/f6/a0/0d624290dec622e7fa084b6881f456809f68777d54a314f5dde932714506/fastar-0.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c05fbc5618ce17675a42576fa49858d79734627f0a0c74c0875ab45ee8de340c", size = 1045031, upload-time = "2025-11-26T02:35:28.108Z" }, + { url = "https://files.pythonhosted.org/packages/a7/74/cf663af53c4706ba88e6b4af44a6b0c3bd7d7ca09f079dc40647a8f06585/fastar-0.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7f41c51ee96f338662ee3c3df4840511ba3f9969606840f1b10b7cb633a3c716", size = 994877, upload-time = "2025-11-26T02:35:45.797Z" }, + { url = "https://files.pythonhosted.org/packages/52/17/444c8be6e77206050e350da7c338102b6cab384be937fa0b1d6d1f9ede73/fastar-0.8.0-cp312-cp312-win32.whl", hash = "sha256:d949a1a2ea7968b734632c009df0571c94636a5e1622c87a6e2bf712a7334f47", size = 455996, upload-time = "2025-11-26T02:36:26.938Z" }, + { url = "https://files.pythonhosted.org/packages/dc/34/fc3b5e56d71a17b1904800003d9251716e8fd65f662e1b10a26881698a74/fastar-0.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:fc645994d5b927d769121094e8a649b09923b3c13a8b0b98696d8f853f23c532", size = 490429, upload-time = "2025-11-26T02:36:12.707Z" }, + { url = "https://files.pythonhosted.org/packages/35/a8/5608cc837417107c594e2e7be850b9365bcb05e99645966a5d6a156285fe/fastar-0.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:d81ee82e8dc78a0adb81728383bd39611177d642a8fa2d601d4ad5ad59e5f3bd", size = 461297, upload-time = "2025-11-26T02:36:03.546Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a5/79ecba3646e22d03eef1a66fb7fc156567213e2e4ab9faab3bbd4489e483/fastar-0.8.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a3253a06845462ca2196024c7a18f5c0ba4de1532ab1c4bad23a40b332a06a6a", size = 706112, upload-time = "2025-11-26T02:34:39.237Z" }, + { url = "https://files.pythonhosted.org/packages/0a/03/4f883bce878218a8676c2d7ca09b50c856a5470bb3b7f63baf9521ea6995/fastar-0.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5cbeb3ebfa0980c68ff8b126295cc6b208ccd81b638aebc5a723d810a7a0e5d2", size = 628954, upload-time = "2025-11-26T02:34:23.705Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f1/892e471f156b03d10ba48ace9384f5a896702a54506137462545f38e40b8/fastar-0.8.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1c0d5956b917daac77d333d48b3f0f3ff927b8039d5b32d8125462782369f761", size = 868685, upload-time = "2025-11-26T02:33:53.077Z" }, + { url = "https://files.pythonhosted.org/packages/39/ba/e24915045852e30014ec6840446975c03f4234d1c9270394b51d3ad18394/fastar-0.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27b404db2b786b65912927ce7f3790964a4bcbde42cdd13091b82a89cd655e1c", size = 765044, upload-time = "2025-11-26T02:32:48.187Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/2c/1aa11ac21a99984864c2fca4994e094319ff3a2046e7a0343c39317bd5b9/fastar-0.8.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0902fc89dcf1e7f07b8563032a4159fe2b835e4c16942c76fd63451d0e5f76a3", size = 764322, upload-time = "2025-11-26T02:33:03.859Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f0/4b91902af39fe2d3bae7c85c6d789586b9fbcf618d7fdb3d37323915906d/fastar-0.8.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:069347e2f0f7a8b99bbac8cd1bc0e06c7b4a31dc964fc60d84b95eab3d869dc1", size = 931016, upload-time = "2025-11-26T02:33:19.902Z" }, + { url = "https://files.pythonhosted.org/packages/c9/97/8fc43a5a9c0a2dc195730f6f7a0f367d171282cd8be2511d0e87c6d2dad0/fastar-0.8.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fd135306f6bfe9a835918280e0eb440b70ab303e0187d90ab51ca86e143f70d", size = 821308, upload-time = "2025-11-26T02:33:34.664Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e9/058615b63a7fd27965e8c5966f393ed0c169f7ff5012e1674f21684de3ba/fastar-0.8.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d06d6897f43c27154b5f2d0eb930a43a81b7eec73f6f0b0114814d4a10ab38", size = 821171, upload-time = "2025-11-26T02:34:08.498Z" }, + { url = "https://files.pythonhosted.org/packages/ca/cf/69e16a17961570a755c37ffb5b5aa7610d2e77807625f537989da66f2a9d/fastar-0.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a922f8439231fa0c32b15e8d70ff6d415619b9d40492029dabbc14a0c53b5f18", size = 986227, upload-time = "2025-11-26T02:34:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/fb/83/2100192372e59b56f4ace37d7d9cabda511afd71b5febad1643d1c334271/fastar-0.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a739abd51eb766384b4caff83050888e80cd75bbcfec61e6d1e64875f94e4a40", size = 1039395, upload-time = "2025-11-26T02:35:12.166Z" }, + { url = "https://files.pythonhosted.org/packages/75/15/cdd03aca972f55872efbb7cf7540c3fa7b97a75d626303a3ea46932163dc/fastar-0.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5a65f419d808b23ac89d5cd1b13a2f340f15bc5d1d9af79f39fdb77bba48ff1b", size = 1044766, upload-time = "2025-11-26T02:35:29.62Z" }, + { url = "https://files.pythonhosted.org/packages/3d/29/945e69e4e2652329ace545999334ec31f1431fbae3abb0105587e11af2ae/fastar-0.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7bb2ae6c0cce58f0db1c9f20495e7557cca2c1ee9c69bbd90eafd54f139171c5", size = 994740, upload-time = "2025-11-26T02:35:47.887Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5d/dbfe28f8cd1eb484bba0c62e5259b2cf6fea229d6ef43e05c06b5a78c034/fastar-0.8.0-cp313-cp313-win32.whl", hash = "sha256:b28753e0d18a643272597cb16d39f1053842aa43131ad3e260c03a2417d38401", size = 455990, upload-time = "2025-11-26T02:36:28.502Z" }, + { url = "https://files.pythonhosted.org/packages/e1/01/e965740bd36e60ef4c5aa2cbe42b6c4eb1dc3551009238a97c2e5e96bd23/fastar-0.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:620e5d737dce8321d49a5ebb7997f1fd0047cde3512082c27dc66d6ac8c1927a", size = 490227, upload-time = "2025-11-26T02:36:14.363Z" }, + { url = "https://files.pythonhosted.org/packages/dd/10/c99202719b83e5249f26902ae53a05aea67d840eeb242019322f20fc171c/fastar-0.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:c4c4bd08df563120cd33e854fe0a93b81579e8571b11f9b7da9e84c37da2d6b6", size = 461078, upload-time = "2025-11-26T02:36:04.94Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/4a/9573b87a0ef07580ed111e7230259aec31bb33ca3667963ebee77022ec61/fastar-0.8.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:50b36ce654ba44b0e13fae607ae17ee6e1597b69f71df1bee64bb8328d881dfc", size = 706041, upload-time = "2025-11-26T02:34:40.638Z" }, + { url = "https://files.pythonhosted.org/packages/4a/19/f95444a1d4f375333af49300aa75ee93afa3335c0e40fda528e460ed859c/fastar-0.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:63a892762683d7ab00df0227d5ea9677c62ff2cde9b875e666c0be569ed940f3", size = 628617, upload-time = "2025-11-26T02:34:24.893Z" }, + { url = "https://files.pythonhosted.org/packages/b3/c9/b51481b38b7e3f16ef2b9e233b1a3623386c939d745d6e41bbd389eaae30/fastar-0.8.0-cp314-cp314-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4ae6a145c1bff592644bde13f2115e0239f4b7babaf506d14e7d208483cf01a5", size = 869299, upload-time = "2025-11-26T02:33:54.274Z" }, + { url = "https://files.pythonhosted.org/packages/bf/02/3ba1267ee5ba7314e29c431cf82eaa68586f2c40cdfa08be3632b7d07619/fastar-0.8.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ae0ff7c0a1c7e1428404b81faee8aebef466bfd0be25bfe4dabf5d535c68741", size = 764667, upload-time = "2025-11-26T02:32:49.606Z" }, + { url = "https://files.pythonhosted.org/packages/1b/84/bf33530fd015b5d7c2cc69e0bce4a38d736754a6955487005aab1af6adcd/fastar-0.8.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbfd87dbd217b45c898b2dbcd0169aae534b2c1c5cbe3119510881f6a5ac8ef5", size = 763993, upload-time = "2025-11-26T02:33:05.782Z" }, + { url = "https://files.pythonhosted.org/packages/da/e0/9564d24e7cea6321a8d921c6d2a457044a476ef197aa4708e179d3d97f0d/fastar-0.8.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5abd99fcba83ef28c8fe6ae2927edc79053db43a0457a962ed85c9bf150d37", size = 930153, upload-time = "2025-11-26T02:33:21.53Z" }, + { url = "https://files.pythonhosted.org/packages/35/b1/6f57fcd8d6e192cfebf97e58eb27751640ad93784c857b79039e84387b51/fastar-0.8.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91d4c685620c3a9d6b5ae091dbabab4f98b20049b7ecc7976e19cc9016c0d5d6", size = 821177, upload-time = "2025-11-26T02:33:35.839Z" }, + { url = "https://files.pythonhosted.org/packages/b3/78/9e004ea9f3aa7466f5ddb6f9518780e1d2f0ed3ca55f093632982598bace/fastar-0.8.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f77c2f2cad76e9dc7b6701297adb1eba87d0485944b416fc2ccf5516c01219a3", size = 820652, upload-time = "2025-11-26T02:34:09.776Z" }, + { url = "https://files.pythonhosted.org/packages/42/95/b604ed536544005c9f1aee7c4c74b00150db3d8d535cd8232dc20f947063/fastar-0.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e7f07c4a3dada7757a8fc430a5b4a29e6ef696d2212747213f57086ffd970316", size = 985961, upload-time = "2025-11-26T02:34:56.401Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7b/fa9d4d96a5d494bdb8699363bb9de8178c0c21a02e1d89cd6f913d127018/fastar-0.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:90c0c3fe55105c0aed8a83135dbdeb31e683455dbd326a1c48fa44c378b85616", size = 1039316, upload-time = "2025-11-26T02:35:13.807Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f9/8462789243bc3f33e8401378ec6d54de4e20cfa60c96a0e15e3e9d1389bb/fastar-0.8.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fb9ee51e5bffe0dab3d3126d3a4fac8d8f7235cedcb4b8e74936087ce1c157f3", size = 1045028, upload-time = "2025-11-26T02:35:31.079Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/71/9abb128777e616127194b509e98fcda3db797d76288c1a8c23dd22afc14f/fastar-0.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e380b1e8d30317f52406c43b11e98d11e1d68723bbd031e18049ea3497b59a6d", size = 994677, upload-time = "2025-11-26T02:35:49.391Z" }, + { url = "https://files.pythonhosted.org/packages/de/c1/b81b3f194853d7ad232a67a1d768f5f51a016f165cfb56cb31b31bbc6177/fastar-0.8.0-cp314-cp314-win32.whl", hash = "sha256:1c4ffc06e9c4a8ca498c07e094670d8d8c0d25b17ca6465b9774da44ea997ab1", size = 456687, upload-time = "2025-11-26T02:36:30.205Z" }, + { url = "https://files.pythonhosted.org/packages/cb/87/9e0cd4768a98181d56f0cdbab2363404cc15deb93f4aad3b99cd2761bbaa/fastar-0.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:5517a8ad4726267c57a3e0e2a44430b782e00b230bf51c55b5728e758bb3a692", size = 490578, upload-time = "2025-11-26T02:36:16.218Z" }, + { url = "https://files.pythonhosted.org/packages/aa/1e/580a76cf91847654f2ad6520e956e93218f778540975bc4190d363f709e2/fastar-0.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:58030551046ff4a8616931e52a36c83545ff05996db5beb6e0cd2b7e748aa309", size = 461473, upload-time = "2025-11-26T02:36:06.373Z" }, + { url = "https://files.pythonhosted.org/packages/58/4c/bdb5c6efe934f68708529c8c9d4055ebef5c4be370621966438f658b29bd/fastar-0.8.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:1e7d29b6bfecb29db126a08baf3c04a5ab667f6cea2b7067d3e623a67729c4a6", size = 705570, upload-time = "2025-11-26T02:34:42.01Z" }, + { url = "https://files.pythonhosted.org/packages/6d/78/f01ac7e71d5a37621bd13598a26e948a12b85ca8042f7ee1a0a8c9f59cda/fastar-0.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:05eb7b96940f9526b485f1d0b02393839f0f61cac4b1f60024984f8b326d2640", size = 627761, upload-time = "2025-11-26T02:34:26.152Z" }, + { url = "https://files.pythonhosted.org/packages/06/45/6df0ecda86ea9d2e95053c1a655d153dee55fc121b6e13ea6d1e246a50b6/fastar-0.8.0-cp314-cp314t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:619352d8ac011794e2345c462189dc02ba634750d23cd9d86a9267dd71b1f278", size = 869414, upload-time = "2025-11-26T02:33:55.618Z" }, + { url = "https://files.pythonhosted.org/packages/b2/72/486421f5a8c0c377cc82e7a50c8a8ea899a6ec2aa72bde8f09fb667a2dc8/fastar-0.8.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74ebfecef3fe6d7a90355fac1402fd30636988332a1d33f3e80019a10782bb24", size = 763863, upload-time = "2025-11-26T02:32:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/d4/64/39f654dbb41a3867fb1f2c8081c014d8f1d32ea10585d84cacbef0b32995/fastar-0.8.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2975aca5a639e26a3ab0d23b4b0628d6dd6d521146c3c11486d782be621a35aa", size = 763065, upload-time = "2025-11-26T02:33:07.274Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bd/c011a34fb3534c4c3301f7c87c4ffd7e47f6113c904c092ddc8a59a303ea/fastar-0.8.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afc438eaed8ff0dcdd9308268be5cb38c1db7e94c3ccca7c498ca13a4a4535a3", size = 930530, upload-time = "2025-11-26T02:33:23.117Z" }, + { url = "https://files.pythonhosted.org/packages/55/9d/aa6e887a7033c571b1064429222bbe09adc9a3c1e04f3d1788ba5838ebd5/fastar-0.8.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ced0a5399cc0a84a858ef0a31ca2d0c24d3bbec4bcda506a9192d8119f3590a", size = 820572, upload-time = "2025-11-26T02:33:37.542Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/9c/7a3a2278a1052e1a5d98646de7c095a00cffd2492b3b84ce730e2f1cd93a/fastar-0.8.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec9b23da8c4c039da3fe2e358973c66976a0c8508aa06d6626b4403cb5666c19", size = 820649, upload-time = "2025-11-26T02:34:11.108Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/d38edc1f4438cd047e56137c26d94783ffade42e1b3bde620ccf17b771ef/fastar-0.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:dfba078fcd53478032fd0ceed56960ec6b7ff0511cfc013a8a3a4307e3a7bac4", size = 985653, upload-time = "2025-11-26T02:34:57.884Z" }, + { url = "https://files.pythonhosted.org/packages/69/d9/2147d0c19757e165cd62d41cec3f7b38fad2ad68ab784978b5f81716c7ea/fastar-0.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:ade56c94c14be356d295fecb47a3fcd473dd43a8803ead2e2b5b9e58feb6dcfa", size = 1038140, upload-time = "2025-11-26T02:35:15.778Z" }, + { url = "https://files.pythonhosted.org/packages/7f/1d/ec4c717ffb8a308871e9602ec3197d957e238dc0227127ac573ec9bca952/fastar-0.8.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e48d938f9366db5e59441728f70b7f6c1ccfab7eff84f96f9b7e689b07786c52", size = 1045195, upload-time = "2025-11-26T02:35:32.865Z" }, + { url = "https://files.pythonhosted.org/packages/6a/9f/637334dc8c8f3bb391388b064ae13f0ad9402bc5a6c3e77b8887d0c31921/fastar-0.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:79c441dc1482ff51a54fb3f57ae6f7bb3d2cff88fa2cc5d196c519f8aab64a56", size = 994686, upload-time = "2025-11-26T02:35:51.392Z" }, + { url = "https://files.pythonhosted.org/packages/c9/e2/dfa19a4b260b8ab3581b7484dcb80c09b25324f4daa6b6ae1c7640d1607a/fastar-0.8.0-cp314-cp314t-win32.whl", hash = "sha256:187f61dc739afe45ac8e47ed7fd1adc45d52eac110cf27d579155720507d6fbe", size = 455767, upload-time = "2025-11-26T02:36:34.758Z" }, + { url = "https://files.pythonhosted.org/packages/51/47/df65c72afc1297797b255f90c4778b5d6f1f0f80282a134d5ab610310ed9/fastar-0.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:40e9d763cf8bf85ce2fa256e010aa795c0fe3d3bd1326d5c3084e6ce7857127e", size = 489971, upload-time = "2025-11-26T02:36:22.081Z" }, + { url = "https://files.pythonhosted.org/packages/85/11/0aa8455af26f0ae89e42be67f3a874255ee5d7f0f026fc86e8d56f76b428/fastar-0.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:e59673307b6a08210987059a2bdea2614fe26e3335d0e5d1a3d95f49a05b1418", size = 460467, upload-time = "2025-11-26T02:36:07.978Z" }, + { url = "https://files.pythonhosted.org/packages/98/6e/6c46aa7f8c8734e7f96ee5141acd3877667ce66f34eea10703aa7571d191/fastar-0.8.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:998e3fa4b555b63eb134e6758437ed739ad1652fdd2a61dfe1dacbfddc35fe66", size = 710662, upload-time = "2025-11-26T02:34:47.593Z" }, + { url = "https://files.pythonhosted.org/packages/70/27/fd622442f2fbd4ff5459677987481ef1c60e077cb4e63a2ed4d8dce6f869/fastar-0.8.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:5f83e60d845091f3a12bc37f412774264d161576eaf810ed8b43567eb934b7e5", size = 634049, upload-time = "2025-11-26T02:34:32.365Z" }, + { url = "https://files.pythonhosted.org/packages/8f/ee/aa4d08aea25b5419a7277132e738ab1cd775f26aebddce11413b07e2fdff/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:299672e1c74d8b73c61684fac9159cfc063d35f4b165996a88facb0e26862cb5", size = 872055, upload-time = "2025-11-26T02:34:01.377Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/9a/2bf2f77aade575e67997e0c759fd55cb1c66b7a5b437b1cd0e97d8b241bc/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3d3a27066b84d015deab5faee78565509bb33b137896443e4144cb1be1a5f90", size = 766787, upload-time = "2025-11-26T02:32:57.161Z" }, + { url = "https://files.pythonhosted.org/packages/0b/90/23a3f6c252f11b10c70f854bce09abc61f71b5a0e6a4b0eac2bcb9a2c583/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef0bcf4385bbdd3c1acecce2d9ea7dab7cc9b8ee0581bbccb7ab11908a7ce288", size = 766861, upload-time = "2025-11-26T02:33:12.824Z" }, + { url = "https://files.pythonhosted.org/packages/76/bb/beeb9078380acd4484db5c957d066171695d9340e3526398eb230127b0c2/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f10ef62b6eda6cb6fd9ba8e1fe08a07d7b2bdcc8eaa00eb91566143b92ed7eee", size = 932667, upload-time = "2025-11-26T02:33:28.405Z" }, + { url = "https://files.pythonhosted.org/packages/f4/6d/b034cc637bd0ee638d5a85d08e941b0b8ffd44cf391fb751ba98233734f7/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c4f6c82a8ee98c17aa48585ee73b51c89c1b010e5c951af83e07c3436180e3fc", size = 822712, upload-time = "2025-11-26T02:33:44.27Z" }, + { url = "https://files.pythonhosted.org/packages/e2/2b/7d183c63f59227c4689792042d6647f2586a5e7273b55e81745063088d81/fastar-0.8.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6129067fcb86276635b5857010f4e9b9c7d5d15dd571bb03c6c1ed73c40fd92", size = 822659, upload-time = "2025-11-26T02:34:16.815Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f9/716e0cd9de2427fdf766bc68176f76226cd01fffef3a56c5046fa863f5f0/fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4cc9e77019e489f1ddac446b6a5b9dfb5c3d9abd142652c22a1d9415dbcc0e47", size = 987412, upload-time = "2025-11-26T02:35:04.259Z" }, + { url = "https://files.pythonhosted.org/packages/a4/b9/9a8c3fd59958c1c8027bc075af11722cdc62c4968bb277e841d131232289/fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:382bfe82c026086487cb17fee12f4c1e2b4e67ce230f2e04487d3e7ddfd69031", size = 1042911, upload-time = "2025-11-26T02:35:21.857Z" }, + { url = "https://files.pythonhosted.org/packages/e2/2f/c3f30963b47022134b8a231c12845f4d7cfba520f59bbc1a82468aea77c7/fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:908d2b9a1ff3d549cc304b32f95706a536da8f0bcb0bc0f9e4c1cce39b80e218", size = 1047464, upload-time = "2025-11-26T02:35:39.376Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8a/218ab6d9a2bab3b07718e6cd8405529600edc1e9c266320e8524c8f63251/fastar-0.8.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1aa7dbde2d2d73eb5b6203d0f74875cb66350f0f1b4325b4839fc8fbbf5d074e", size = 997309, upload-time = "2025-11-26T02:35:57.722Z" }, +] + +[[package]] +name = "filelock" +version = "3.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = 
"sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" }, +] + +[[package]] +name = "flatbuffers" +version = "25.9.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/1f/3ee70b0a55137442038f2a33469cc5fddd7e0ad2abf83d7497c18a2b6923/flatbuffers-25.9.23.tar.gz", hash = "sha256:676f9fa62750bb50cf531b42a0a2a118ad8f7f797a511eda12881c016f093b12", size = 22067, upload-time = "2025-09-24T05:25:30.106Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/1b/00a78aa2e8fbd63f9af08c9c19e6deb3d5d66b4dda677a0f61654680ee89/flatbuffers-25.9.23-py2.py3-none-any.whl", hash = "sha256:255538574d6cb6d0a79a17ec8bc0d30985913b87513a01cce8bcdb6b4c44d0e2", size = 30869, upload-time = "2025-09-24T05:25:28.912Z" }, +] + +[[package]] +name = "flo-ai" +version = "1.1.0rc5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "anthropic" }, + { name = "chardet" }, + { name = "cryptography" }, + { name = "google-cloud-aiplatform" }, + { name = "google-genai" }, + { name = "httpx" }, + { name = "openai" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-sdk" }, + { name = "pydantic" }, + { name = "pyjwt" }, + { name = "pymupdf4llm" }, + { name = "pypdf" }, + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/1d/3ed8c9e8f140ccf37c3e8b4a2676ffe3da74e2a43ab46c5f501765fd7f4f/flo_ai-1.1.0rc5.tar.gz", hash = "sha256:418d73a1ef7d1a41fa8c039037628985fb3b9cdc3918acb4e84587e8b70c519c", size = 83464, upload-time = "2025-12-02T14:19:35.405Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/d8/07806187b0a3ff8debe08d06881222c39bd5087fd868c151ac2ab8f54099/flo_ai-1.1.0rc5-py3-none-any.whl", hash = "sha256:e7e1929a45333c8c3584bbe6252e138dcd9d3c12036762d1b875f9414852209c", size = 106332, upload-time = "2025-12-02T14:19:33.686Z" }, +] + +[[package]] +name = "flo-cloud" +version = "0.1.0" +source = { editable = "packages/flo_cloud" } +dependencies = [ + { name = "boto3" }, + { name = "cryptography" }, + { name = "google-cloud-bigquery" }, + { name = "google-cloud-kms" }, + { name = "google-cloud-pubsub" }, + { name = "google-cloud-storage" }, + { name = "redshift-connector" }, +] + +[package.metadata] +requires-dist = [ + { name = "boto3", specifier = "<=1.38.40" }, + { name = "cryptography", specifier = ">=45.0.4" }, + { name = "google-cloud-bigquery", specifier = "==3.34.0" }, + { name = "google-cloud-kms", specifier = ">=3.5.1" }, + { name = "google-cloud-pubsub", specifier = ">=2.28.0" }, + { name = "google-cloud-storage", specifier = "<3.0.0" }, + { name = "redshift-connector", specifier = ">=2.1.7" }, +] + +[[package]] +name = "flo-utils" +version = "0.1.0" +source = { editable = "packages/flo_utils" } +dependencies = [ + { name = "boto3" }, + { name = "common-module" }, + { name = "db-repo-module" }, + { name = "google-cloud-kms" }, + { name = "pyyaml" }, + { name = "tenacity" }, +] + +[package.metadata] +requires-dist = [ + { name = "boto3", specifier = "<=1.38.40" }, + { name = "common-module", editable = "modules/common_module" }, + { name = "db-repo-module", editable = "modules/db_repo_module" }, + { name = "google-cloud-kms", specifier = ">=3.5.1" }, + { name = "pyyaml", specifier = ">=6.0.3,<7" }, + { name = "tenacity", specifier = ">=8.4.1" }, +] + +[[package]] +name = 
"floconsole" +version = "0.1.0" +source = { editable = "apps/floconsole" } +dependencies = [ + { name = "alembic" }, + { name = "async-lru" }, + { name = "bcrypt" }, + { name = "common-module" }, + { name = "dependency-injector" }, + { name = "fastapi" }, + { name = "flo-cloud" }, + { name = "httpx" }, + { name = "psycopg", extra = ["binary", "pool"] }, + { name = "psycopg2" }, + { name = "python-dotenv" }, + { name = "python-jose", extra = ["cryptography"] }, + { name = "sqlalchemy" }, + { name = "uvicorn" }, +] + +[package.metadata] +requires-dist = [ + { name = "alembic", specifier = ">=1.15.2,<2.0.0" }, + { name = "async-lru", specifier = ">=2.0.5" }, + { name = "bcrypt", specifier = ">=4.2.1,<5.0.0" }, + { name = "common-module", editable = "modules/common_module" }, + { name = "dependency-injector", specifier = ">=4.46.0,<5.0.0" }, + { name = "fastapi", specifier = ">=0.115.2,<1.0.0" }, + { name = "flo-cloud", editable = "packages/flo_cloud" }, + { name = "httpx", specifier = ">=0.28.1,<1.0.0" }, + { name = "psycopg", extras = ["binary", "pool"], specifier = ">=3.2.3,<4.0.0" }, + { name = "psycopg2", specifier = ">=2.9.10,<3.0.0" }, + { name = "python-dotenv", specifier = ">=1.1.0,<2.0.0" }, + { name = "python-jose", extras = ["cryptography"], specifier = ">=3.3.0,<4.0.0" }, + { name = "sqlalchemy", specifier = ">=2.0.40,<3.0.0" }, + { name = "uvicorn", specifier = ">=0.30.5,<1.0.0" }, +] + +[[package]] +name = "floware" +version = "0.1.0" +source = { editable = "apps/floware" } +dependencies = [ + { name = "agents-module" }, + { name = "api-services-module" }, + { name = "auth-module" }, + { name = "common-module" }, + { name = "db-repo-module" }, + { name = "dependency-injector" }, + { name = "fastapi" }, + { name = "gold-module" }, + { name = "image-search-module" }, + { name = "inference-module" }, + { name = "insights-module" }, + { name = "knowledge-base-module" }, + { name = "llm-inference-config-module" }, + { name = "plugins-module" }, + { name = "pottery" }, + { name = "product-analysis-module" }, + { name = "python-dotenv" }, + { name = "python-multipart" }, + { name = "tools-module" }, + { name = "user-management-module" }, + { name = "uvicorn" }, + { name = "voice-agents-module" }, +] + +[package.metadata] +requires-dist = [ + { name = "agents-module", editable = "modules/agents_module" }, + { name = "api-services-module", editable = "modules/api_services_module" }, + { name = "auth-module", editable = "modules/auth_module" }, + { name = "common-module", editable = "modules/common_module" }, + { name = "db-repo-module", editable = "modules/db_repo_module" }, + { name = "dependency-injector", specifier = ">=4.42.0,<5.0.0" }, + { name = "fastapi", specifier = ">=0.115.2,<1.0.0" }, + { name = "gold-module", editable = "modules/gold_module" }, + { name = "image-search-module", editable = "modules/image_search_module" }, + { name = "inference-module", editable = "modules/inference_module" }, + { name = "insights-module", editable = "modules/insights_module" }, + { name = "knowledge-base-module", editable = "modules/knowledge_base_module" }, + { name = "llm-inference-config-module", editable = "modules/llm_inference_config_module" }, + { name = "plugins-module", editable = "modules/plugins_module" }, + { name = "pottery", specifier = ">=3.0.1,<4.0.0" }, + { name = "product-analysis-module", editable = "modules/product_analysis_module" }, + { name = "python-dotenv", specifier = ">=1.1.0,<2.0.0" }, + { name = "python-multipart", specifier = "==0.0.9" }, + { name = 
"tools-module", editable = "modules/tools_module" }, + { name = "user-management-module", editable = "modules/user_management_module" }, + { name = "uvicorn", specifier = ">=0.30.1,<1.0.0" }, + { name = "voice-agents-module", editable = "modules/voice_agents_module" }, +] + +[[package]] +name = "frozenlist" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a", size = 82251, upload-time = "2025-06-09T23:00:16.279Z" }, + { url = "https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750", size = 48183, upload-time = "2025-06-09T23:00:17.698Z" }, + { url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd", size = 47107, upload-time = "2025-06-09T23:00:18.952Z" }, + { url = "https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2", size = 237333, upload-time = "2025-06-09T23:00:20.275Z" }, + { url = "https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f", size = 231724, upload-time = "2025-06-09T23:00:21.705Z" }, + { url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30", size = 245842, upload-time = "2025-06-09T23:00:23.148Z" }, + { url = "https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98", size = 239767, upload-time = "2025-06-09T23:00:25.103Z" }, + { url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86", size = 224130, upload-time = "2025-06-09T23:00:27.061Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae", size = 235301, upload-time = "2025-06-09T23:00:29.02Z" }, + { url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8", size = 234606, upload-time = "2025-06-09T23:00:30.514Z" }, + { url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31", size = 248372, upload-time = "2025-06-09T23:00:31.966Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7", size = 229860, upload-time = "2025-06-09T23:00:33.375Z" }, + { url = "https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5", size = 245893, upload-time = "2025-06-09T23:00:35.002Z" }, + { url = "https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898", size = 246323, upload-time = "2025-06-09T23:00:36.468Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56", size = 233149, upload-time = "2025-06-09T23:00:37.963Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7", size = 39565, upload-time = "2025-06-09T23:00:39.753Z" }, + { url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d", size = 44019, upload-time = "2025-06-09T23:00:40.988Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, + { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, + { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, + { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, + { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, + { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, + { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, + { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, + { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, + { url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, + { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, + { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = "2025-06-09T23:01:06.54Z" }, + { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, + { url = "https://files.pythonhosted.org/packages/24/90/6b2cebdabdbd50367273c20ff6b57a3dfa89bd0762de02c3a1eb42cb6462/frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee", size = 79791, upload-time = "2025-06-09T23:01:09.368Z" }, + { url = "https://files.pythonhosted.org/packages/83/2e/5b70b6a3325363293fe5fc3ae74cdcbc3e996c2a11dde2fd9f1fb0776d19/frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d", size = 47165, upload-time = "2025-06-09T23:01:10.653Z" }, + { url = "https://files.pythonhosted.org/packages/f4/25/a0895c99270ca6966110f4ad98e87e5662eab416a17e7fd53c364bf8b954/frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43", size = 45881, upload-time = "2025-06-09T23:01:12.296Z" }, + { url = "https://files.pythonhosted.org/packages/19/7c/71bb0bbe0832793c601fff68cd0cf6143753d0c667f9aec93d3c323f4b55/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d", size = 232409, upload-time = "2025-06-09T23:01:13.641Z" }, + { url = "https://files.pythonhosted.org/packages/c0/45/ed2798718910fe6eb3ba574082aaceff4528e6323f9a8570be0f7028d8e9/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee", size = 225132, upload-time = "2025-06-09T23:01:15.264Z" }, + { url = "https://files.pythonhosted.org/packages/ba/e2/8417ae0f8eacb1d071d4950f32f229aa6bf68ab69aab797b72a07ea68d4f/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb", size = 237638, upload-time = "2025-06-09T23:01:16.752Z" }, + { url = "https://files.pythonhosted.org/packages/f8/b7/2ace5450ce85f2af05a871b8c8719b341294775a0a6c5585d5e6170f2ce7/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f", size = 233539, upload-time = "2025-06-09T23:01:18.202Z" }, + { url = "https://files.pythonhosted.org/packages/46/b9/6989292c5539553dba63f3c83dc4598186ab2888f67c0dc1d917e6887db6/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60", size = 215646, upload-time = "2025-06-09T23:01:19.649Z" }, + { url = "https://files.pythonhosted.org/packages/72/31/bc8c5c99c7818293458fe745dab4fd5730ff49697ccc82b554eb69f16a24/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00", size = 232233, upload-time = "2025-06-09T23:01:21.175Z" }, + { url = "https://files.pythonhosted.org/packages/59/52/460db4d7ba0811b9ccb85af996019f5d70831f2f5f255f7cc61f86199795/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b", size = 227996, upload-time = "2025-06-09T23:01:23.098Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c9/f4b39e904c03927b7ecf891804fd3b4df3db29b9e487c6418e37988d6e9d/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c", size = 242280, upload-time = "2025-06-09T23:01:24.808Z" }, + { url = "https://files.pythonhosted.org/packages/b8/33/3f8d6ced42f162d743e3517781566b8481322be321b486d9d262adf70bfb/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949", size = 217717, upload-time = "2025-06-09T23:01:26.28Z" }, + { url = "https://files.pythonhosted.org/packages/3e/e8/ad683e75da6ccef50d0ab0c2b2324b32f84fc88ceee778ed79b8e2d2fe2e/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca", size = 236644, upload-time = "2025-06-09T23:01:27.887Z" }, + { url = "https://files.pythonhosted.org/packages/b2/14/8d19ccdd3799310722195a72ac94ddc677541fb4bef4091d8e7775752360/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b", size = 238879, upload-time = "2025-06-09T23:01:29.524Z" }, + { url = "https://files.pythonhosted.org/packages/ce/13/c12bf657494c2fd1079a48b2db49fa4196325909249a52d8f09bc9123fd7/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e", size = 232502, upload-time = "2025-06-09T23:01:31.287Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8b/e7f9dfde869825489382bc0d512c15e96d3964180c9499efcec72e85db7e/frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1", size = 39169, upload-time = "2025-06-09T23:01:35.503Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/a487a98d94205d85745080a37860ff5744b9820a2c9acbcdd9440bfddf98/frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba", size = 43219, upload-time = "2025-06-09T23:01:36.784Z" }, + { url = "https://files.pythonhosted.org/packages/56/d5/5c4cf2319a49eddd9dd7145e66c4866bdc6f3dbc67ca3d59685149c11e0d/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = 
"sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d", size = 84345, upload-time = "2025-06-09T23:01:38.295Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/ec2c1e1dc16b85bc9d526009961953df9cec8481b6886debb36ec9107799/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d", size = 48880, upload-time = "2025-06-09T23:01:39.887Z" }, + { url = "https://files.pythonhosted.org/packages/69/86/f9596807b03de126e11e7d42ac91e3d0b19a6599c714a1989a4e85eeefc4/frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b", size = 48498, upload-time = "2025-06-09T23:01:41.318Z" }, + { url = "https://files.pythonhosted.org/packages/5e/cb/df6de220f5036001005f2d726b789b2c0b65f2363b104bbc16f5be8084f8/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146", size = 292296, upload-time = "2025-06-09T23:01:42.685Z" }, + { url = "https://files.pythonhosted.org/packages/83/1f/de84c642f17c8f851a2905cee2dae401e5e0daca9b5ef121e120e19aa825/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74", size = 273103, upload-time = "2025-06-09T23:01:44.166Z" }, + { url = "https://files.pythonhosted.org/packages/88/3c/c840bfa474ba3fa13c772b93070893c6e9d5c0350885760376cbe3b6c1b3/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1", size = 292869, upload-time = "2025-06-09T23:01:45.681Z" }, + { url = "https://files.pythonhosted.org/packages/a6/1c/3efa6e7d5a39a1d5ef0abeb51c48fb657765794a46cf124e5aca2c7a592c/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1", size = 291467, upload-time = "2025-06-09T23:01:47.234Z" }, + { url = "https://files.pythonhosted.org/packages/4f/00/d5c5e09d4922c395e2f2f6b79b9a20dab4b67daaf78ab92e7729341f61f6/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384", size = 266028, upload-time = "2025-06-09T23:01:48.819Z" }, + { url = "https://files.pythonhosted.org/packages/4e/27/72765be905619dfde25a7f33813ac0341eb6b076abede17a2e3fbfade0cb/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb", size = 284294, upload-time = "2025-06-09T23:01:50.394Z" }, + { url = "https://files.pythonhosted.org/packages/88/67/c94103a23001b17808eb7dd1200c156bb69fb68e63fcf0693dde4cd6228c/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c", size = 281898, upload-time = "2025-06-09T23:01:52.234Z" }, + { url = "https://files.pythonhosted.org/packages/42/34/a3e2c00c00f9e2a9db5653bca3fec306349e71aff14ae45ecc6d0951dd24/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65", size = 290465, upload-time = "2025-06-09T23:01:53.788Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/73/f89b7fbce8b0b0c095d82b008afd0590f71ccb3dee6eee41791cf8cd25fd/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3", size = 266385, upload-time = "2025-06-09T23:01:55.769Z" }, + { url = "https://files.pythonhosted.org/packages/cd/45/e365fdb554159462ca12df54bc59bfa7a9a273ecc21e99e72e597564d1ae/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657", size = 288771, upload-time = "2025-06-09T23:01:57.4Z" }, + { url = "https://files.pythonhosted.org/packages/00/11/47b6117002a0e904f004d70ec5194fe9144f117c33c851e3d51c765962d0/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104", size = 288206, upload-time = "2025-06-09T23:01:58.936Z" }, + { url = "https://files.pythonhosted.org/packages/40/37/5f9f3c3fd7f7746082ec67bcdc204db72dad081f4f83a503d33220a92973/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf", size = 282620, upload-time = "2025-06-09T23:02:00.493Z" }, + { url = "https://files.pythonhosted.org/packages/0b/31/8fbc5af2d183bff20f21aa743b4088eac4445d2bb1cdece449ae80e4e2d1/frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81", size = 43059, upload-time = "2025-06-09T23:02:02.072Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ed/41956f52105b8dbc26e457c5705340c67c8cc2b79f394b79bffc09d0e938/frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e", size = 47516, upload-time = "2025-06-09T23:02:03.779Z" }, + { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, +] + +[[package]] +name = "fsspec" +version = "2025.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/de/e0/bab50af11c2d75c9c4a2a26a5254573c0bd97cea152254401510950486fa/fsspec-2025.9.0.tar.gz", hash = "sha256:19fd429483d25d28b65ec68f9f4adc16c17ea2c7c7bf54ec61360d478fb19c19", size = 304847, upload-time = "2025-09-02T19:10:49.215Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/71/70db47e4f6ce3e5c37a607355f80da8860a33226be640226ac52cb05ef2e/fsspec-2025.9.0-py3-none-any.whl", hash = "sha256:530dc2a2af60a414a832059574df4a6e10cce927f6f4a78209390fe38955cfb7", size = 199289, upload-time = "2025-09-02T19:10:47.708Z" }, +] + +[[package]] +name = "future" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/b2/4140c69c6a66432916b26158687e821ba631a4c9273c474343badf84d3ba/future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05", size = 1228490, upload-time = "2024-02-21T11:52:38.461Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/71/ae30dadffc90b9006d77af76b393cb9dfbfc9629f339fc1574a1c52e6806/future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216", size = 491326, upload-time = "2024-02-21T11:52:35.956Z" }, +] + 
+[[package]] +name = "gold-module" +version = "0.1.0" +source = { editable = "modules/gold_module" } +dependencies = [ + { name = "common-module" }, + { name = "dependency-injector" }, + { name = "fastapi" }, + { name = "google-cloud-pubsub" }, + { name = "google-cloud-storage" }, +] + +[package.metadata] +requires-dist = [ + { name = "common-module", editable = "modules/common_module" }, + { name = "dependency-injector", specifier = ">=4.42.0,<5.0.0" }, + { name = "fastapi", specifier = ">=0.115.2,<1.0.0" }, + { name = "google-cloud-pubsub", specifier = ">=2.29.0,<3.0.0" }, + { name = "google-cloud-storage", specifier = "<3.0.0" }, +] + +[[package]] +name = "google-api-core" +version = "2.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "googleapis-common-protos" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dc/21/e9d043e88222317afdbdb567165fdbc3b0aad90064c7e0c9eb0ad9955ad8/google_api_core-2.25.1.tar.gz", hash = "sha256:d2aaa0b13c78c61cb3f4282c464c046e45fbd75755683c9c525e6e8f7ed0a5e8", size = 165443, upload-time = "2025-06-12T20:52:20.439Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/4b/ead00905132820b623732b175d66354e9d3e69fcf2a5dcdab780664e7896/google_api_core-2.25.1-py3-none-any.whl", hash = "sha256:8a2a56c1fef82987a524371f99f3bd0143702fecc670c72e600c1cda6bf8dbb7", size = 160807, upload-time = "2025-06-12T20:52:19.334Z" }, +] + +[package.optional-dependencies] +grpc = [ + { name = "grpcio" }, + { name = "grpcio-status" }, +] + +[[package]] +name = "google-api-python-client" +version = "2.183.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-auth-httplib2" }, + { name = "httplib2" }, + { name = "uritemplate" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fa/1f/49a2c83fc6dcd8b127cc9efbecf7d5fc36109c2028ba22ed6cb4d072fca4/google_api_python_client-2.183.0.tar.gz", hash = "sha256:abae37e04fecf719388e5c02f707ed9cdf952f10b217c79a3e76c636762e3ea9", size = 13645623, upload-time = "2025-09-23T22:27:00.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/06/1974f937172854bc7622eff5c2390f33542ceb843f305922922c8f5f7f17/google_api_python_client-2.183.0-py3-none-any.whl", hash = "sha256:2005b6e86c27be1db1a43f43e047a0f8e004159f3cceddecb08cf1624bddba31", size = 14214837, upload-time = "2025-09-23T22:26:57.758Z" }, +] + +[[package]] +name = "google-auth" +version = "2.41.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "pyasn1-modules" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/07/c5/87742f5b5f055514c67f970f7174a876fccff2289a69d460b0614cc7ccfb/google_auth-2.41.0.tar.gz", hash = "sha256:c9d7b534ea4a5d9813c552846797fafb080312263cd4994d6622dd50992ae101", size = 292282, upload-time = "2025-09-29T21:36:35.791Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/ff/a1c426fc9bea7268230bf92340da7d112fae18cf946cafe13ab17d14e6ee/google_auth-2.41.0-py2.py3-none-any.whl", hash = "sha256:d8bed9b53ab63b7b0374656b8e1bef051f95bb14ecc0cf21ba49de7911d62e09", size = 221168, upload-time = "2025-09-29T21:36:33.925Z" }, +] + +[[package]] +name = "google-auth-httplib2" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name 
= "httplib2" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/56/be/217a598a818567b28e859ff087f347475c807a5649296fb5a817c58dacef/google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05", size = 10842, upload-time = "2023-12-12T17:40:30.722Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/8a/fe34d2f3f9470a27b01c9e76226965863f153d5fbe276f83608562e49c04/google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d", size = 9253, upload-time = "2023-12-12T17:40:13.055Z" }, +] + +[[package]] +name = "google-cloud-aiplatform" +version = "1.117.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docstring-parser" }, + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "google-cloud-bigquery" }, + { name = "google-cloud-resource-manager" }, + { name = "google-cloud-storage" }, + { name = "google-genai" }, + { name = "packaging" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "shapely" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b4/73/7ac970c14b6118eb1b29189bfb07f8ee20d0d75ffa2a44d3acf82dcae286/google_cloud_aiplatform-1.117.0.tar.gz", hash = "sha256:93d397b85559d136d2cdd4b569defce593e11dabd307779272480f015efe6378", size = 9659839, upload-time = "2025-09-25T21:11:26.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/a5/61f3615aecd5bdb502f4b095b102695da2f70ed7061b643c56c6f6e3482f/google_cloud_aiplatform-1.117.0-py2.py3-none-any.whl", hash = "sha256:3d7a5346bcceaa9b8fac35a7cc3e180ce89900a15f3c8e91404e80ccb913ac7f", size = 8038334, upload-time = "2025-09-25T21:11:22.973Z" }, +] + +[[package]] +name = "google-cloud-bigquery" +version = "3.34.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = "google-resumable-media" }, + { name = "packaging" }, + { name = "python-dateutil" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/24/f9/e9da2d56d7028f05c0e2f5edf6ce43c773220c3172666c3dd925791d763d/google_cloud_bigquery-3.34.0.tar.gz", hash = "sha256:5ee1a78ba5c2ccb9f9a8b2bf3ed76b378ea68f49b6cac0544dc55cc97ff7c1ce", size = 489091, upload-time = "2025-05-29T17:18:06.03Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/7e/7115c4f67ca0bc678f25bff1eab56cc37d06eb9a3978940b2ebd0705aa0a/google_cloud_bigquery-3.34.0-py3-none-any.whl", hash = "sha256:de20ded0680f8136d92ff5256270b5920dfe4fae479f5d0f73e90e5df30b1cf7", size = 253555, upload-time = "2025-05-29T17:18:02.904Z" }, +] + +[[package]] +name = "google-cloud-core" +version = "2.4.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d6/b8/2b53838d2acd6ec6168fd284a990c76695e84c65deee79c9f3a4276f6b4f/google_cloud_core-2.4.3.tar.gz", hash = "sha256:1fab62d7102844b278fe6dead3af32408b1df3eb06f5c7e8634cbd40edc4da53", size = 35861, upload-time = "2025-03-10T21:05:38.948Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/86/bda7241a8da2d28a754aad2ba0f6776e35b67e37c36ae0c45d49370f1014/google_cloud_core-2.4.3-py2.py3-none-any.whl", hash = 
"sha256:5130f9f4c14b4fafdff75c79448f9495cfade0d8775facf1b09c3bf67e027f6e", size = 29348, upload-time = "2025-03-10T21:05:37.785Z" }, +] + +[[package]] +name = "google-cloud-kms" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "grpc-google-iam-v1" }, + { name = "proto-plus" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/73/8a/ee0569d6dd4f4bb99fdf2a28466f1914a3f2849506c75b620c213feb043b/google_cloud_kms-3.6.0.tar.gz", hash = "sha256:c0f7f2474e35e99e6a36651520a26fdba4bb8e73b7cd0d9bff8c8bd92737afcc", size = 329923, upload-time = "2025-09-22T16:51:16.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/82/686bde4666b72e821f724dc2d143ad0b9a50af7c145ecba1e57ccb9aeb97/google_cloud_kms-3.6.0-py3-none-any.whl", hash = "sha256:1dc389f327cce288ac3091860497ab50b2d166fb71603e632bc4bfd1b2b74cd5", size = 272683, upload-time = "2025-09-22T16:51:00.132Z" }, +] + +[[package]] +name = "google-cloud-pubsub" +version = "2.30.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "grpc-google-iam-v1" }, + { name = "grpcio" }, + { name = "grpcio-status" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-sdk" }, + { name = "proto-plus" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bb/b4/cc75714db4ae34d1cfd63ed0dc08a1d0f8b45254b1d205d46b8686ae80c9/google_cloud_pubsub-2.30.0.tar.gz", hash = "sha256:26975ed728d2209479f4fb2667e3b3bd70de7605abadf4b301c651c39ed83042", size = 390598, upload-time = "2025-06-09T14:04:26.61Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/53/680efad915c221a02ce8d0bfdb9e72e9cad8e87923dc479e33edff56fc34/google_cloud_pubsub-2.30.0-py3-none-any.whl", hash = "sha256:bd7adabc1b79ba2220529401e34dfd72bfdbdad9132a697b4200012a49281629", size = 318420, upload-time = "2025-06-10T02:13:09.581Z" }, +] + +[[package]] +name = "google-cloud-resource-manager" +version = "1.14.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "grpc-google-iam-v1" }, + { name = "proto-plus" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6e/ca/a4648f5038cb94af4b3942815942a03aa9398f9fb0bef55b3f1585b9940d/google_cloud_resource_manager-1.14.2.tar.gz", hash = "sha256:962e2d904c550d7bac48372607904ff7bb3277e3bb4a36d80cc9a37e28e6eb74", size = 446370, upload-time = "2025-03-17T11:35:56.343Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/ea/a92631c358da377af34d3a9682c97af83185c2d66363d5939ab4a1169a7f/google_cloud_resource_manager-1.14.2-py3-none-any.whl", hash = "sha256:d0fa954dedd1d2b8e13feae9099c01b8aac515b648e612834f9942d2795a9900", size = 394344, upload-time = "2025-03-17T11:35:54.722Z" }, +] + +[[package]] +name = "google-cloud-speech" +version = "2.34.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "grpcio" }, + { name = "proto-plus" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b7/c2/500c58a7e3008cb77da01a2f2a8284ac55c808545d18551c62a031ff548d/google_cloud_speech-2.34.0.tar.gz", hash = 
"sha256:2a7bffd84f134b9b70c9f11cbb5088c534f92be149d71d9073d0b9dd3a431acf", size = 391496, upload-time = "2025-10-20T14:57:17.127Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/4c/8c52951a4078f4b181917c37a2610e69c0b24a10567d0182bf089a933c35/google_cloud_speech-2.34.0-py3-none-any.whl", hash = "sha256:cc0c6c0fda9306fee01c998bc207b68f71e0a3247121a5a3a27daabacd3a8c98", size = 336614, upload-time = "2025-10-20T14:54:05.004Z" }, +] + +[[package]] +name = "google-cloud-storage" +version = "2.19.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = "google-crc32c" }, + { name = "google-resumable-media" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/36/76/4d965702e96bb67976e755bed9828fa50306dca003dbee08b67f41dd265e/google_cloud_storage-2.19.0.tar.gz", hash = "sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2", size = 5535488, upload-time = "2024-12-05T01:35:06.49Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/94/6db383d8ee1adf45dc6c73477152b82731fa4c4a46d9c1932cc8757e0fd4/google_cloud_storage-2.19.0-py2.py3-none-any.whl", hash = "sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba", size = 131787, upload-time = "2024-12-05T01:35:04.736Z" }, +] + +[[package]] +name = "google-cloud-texttospeech" +version = "2.33.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "grpcio" }, + { name = "proto-plus" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/b0/326f73a5499f06dacb8a847a78020ebe7c89e0f81896d5cee9093b2d5367/google_cloud_texttospeech-2.33.0.tar.gz", hash = "sha256:311157b74210cc4777f148ab6a62bc2cc24b3f52a7dc106c0a0223c1830b618f", size = 185291, upload-time = "2025-10-23T16:28:55.507Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/c6/59abc67822cbb2bd48b769c085531fffaf0d0d87420216207dc6cadd961c/google_cloud_texttospeech-2.33.0-py3-none-any.whl", hash = "sha256:a97273fc0e32972f8d262a9cb4a134eb9f1300518cae3612ff80053a5eef1f44", size = 192172, upload-time = "2025-10-23T16:28:46.891Z" }, +] + +[[package]] +name = "google-crc32c" +version = "1.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/ae/87802e6d9f9d69adfaedfcfd599266bf386a54d0be058b532d04c794f76d/google_crc32c-1.7.1.tar.gz", hash = "sha256:2bff2305f98846f3e825dbeec9ee406f89da7962accdb29356e4eadc251bd472", size = 14495, upload-time = "2025-03-26T14:29:13.32Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/94/220139ea87822b6fdfdab4fb9ba81b3fff7ea2c82e2af34adc726085bffc/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6fbab4b935989e2c3610371963ba1b86afb09537fd0c633049be82afe153ac06", size = 30468, upload-time = "2025-03-26T14:32:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/94/97/789b23bdeeb9d15dc2904660463ad539d0318286d7633fe2760c10ed0c1c/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:ed66cbe1ed9cbaaad9392b5259b3eba4a9e565420d734e6238813c428c3336c9", size = 30313, upload-time = "2025-03-26T14:57:38.758Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/b8/976a2b843610c211e7ccb3e248996a61e87dbb2c09b1499847e295080aec/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee6547b657621b6cbed3562ea7826c3e11cab01cd33b74e1f677690652883e77", size = 33048, upload-time = "2025-03-26T14:41:30.679Z" }, + { url = "https://files.pythonhosted.org/packages/c9/16/a3842c2cf591093b111d4a5e2bfb478ac6692d02f1b386d2a33283a19dc9/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d68e17bad8f7dd9a49181a1f5a8f4b251c6dbc8cc96fb79f1d321dfd57d66f53", size = 32669, upload-time = "2025-03-26T14:41:31.432Z" }, + { url = "https://files.pythonhosted.org/packages/04/17/ed9aba495916fcf5fe4ecb2267ceb851fc5f273c4e4625ae453350cfd564/google_crc32c-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:6335de12921f06e1f774d0dd1fbea6bf610abe0887a1638f64d694013138be5d", size = 33476, upload-time = "2025-03-26T14:29:10.211Z" }, + { url = "https://files.pythonhosted.org/packages/dd/b7/787e2453cf8639c94b3d06c9d61f512234a82e1d12d13d18584bd3049904/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2d73a68a653c57281401871dd4aeebbb6af3191dcac751a76ce430df4d403194", size = 30470, upload-time = "2025-03-26T14:34:31.655Z" }, + { url = "https://files.pythonhosted.org/packages/ed/b4/6042c2b0cbac3ec3a69bb4c49b28d2f517b7a0f4a0232603c42c58e22b44/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:22beacf83baaf59f9d3ab2bbb4db0fb018da8e5aebdce07ef9f09fce8220285e", size = 30315, upload-time = "2025-03-26T15:01:54.634Z" }, + { url = "https://files.pythonhosted.org/packages/29/ad/01e7a61a5d059bc57b702d9ff6a18b2585ad97f720bd0a0dbe215df1ab0e/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19eafa0e4af11b0a4eb3974483d55d2d77ad1911e6cf6f832e1574f6781fd337", size = 33180, upload-time = "2025-03-26T14:41:32.168Z" }, + { url = "https://files.pythonhosted.org/packages/3b/a5/7279055cf004561894ed3a7bfdf5bf90a53f28fadd01af7cd166e88ddf16/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d86616faaea68101195c6bdc40c494e4d76f41e07a37ffdef270879c15fb65", size = 32794, upload-time = "2025-03-26T14:41:33.264Z" }, + { url = "https://files.pythonhosted.org/packages/0f/d6/77060dbd140c624e42ae3ece3df53b9d811000729a5c821b9fd671ceaac6/google_crc32c-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:b7491bdc0c7564fcf48c0179d2048ab2f7c7ba36b84ccd3a3e1c3f7a72d3bba6", size = 33477, upload-time = "2025-03-26T14:29:10.94Z" }, + { url = "https://files.pythonhosted.org/packages/8b/72/b8d785e9184ba6297a8620c8a37cf6e39b81a8ca01bb0796d7cbb28b3386/google_crc32c-1.7.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:df8b38bdaf1629d62d51be8bdd04888f37c451564c2042d36e5812da9eff3c35", size = 30467, upload-time = "2025-03-26T14:36:06.909Z" }, + { url = "https://files.pythonhosted.org/packages/34/25/5f18076968212067c4e8ea95bf3b69669f9fc698476e5f5eb97d5b37999f/google_crc32c-1.7.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:e42e20a83a29aa2709a0cf271c7f8aefaa23b7ab52e53b322585297bb94d4638", size = 30309, upload-time = "2025-03-26T15:06:15.318Z" }, + { url = "https://files.pythonhosted.org/packages/92/83/9228fe65bf70e93e419f38bdf6c5ca5083fc6d32886ee79b450ceefd1dbd/google_crc32c-1.7.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:905a385140bf492ac300026717af339790921f411c0dfd9aa5a9e69a08ed32eb", size = 33133, upload-time = 
"2025-03-26T14:41:34.388Z" }, + { url = "https://files.pythonhosted.org/packages/c3/ca/1ea2fd13ff9f8955b85e7956872fdb7050c4ace8a2306a6d177edb9cf7fe/google_crc32c-1.7.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b211ddaf20f7ebeec5c333448582c224a7c90a9d98826fbab82c0ddc11348e6", size = 32773, upload-time = "2025-03-26T14:41:35.19Z" }, + { url = "https://files.pythonhosted.org/packages/89/32/a22a281806e3ef21b72db16f948cad22ec68e4bdd384139291e00ff82fe2/google_crc32c-1.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:0f99eaa09a9a7e642a61e06742856eec8b19fc0037832e03f941fe7cf0c8e4db", size = 33475, upload-time = "2025-03-26T14:29:11.771Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c5/002975aff514e57fc084ba155697a049b3f9b52225ec3bc0f542871dd524/google_crc32c-1.7.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32d1da0d74ec5634a05f53ef7df18fc646666a25efaaca9fc7dcfd4caf1d98c3", size = 33243, upload-time = "2025-03-26T14:41:35.975Z" }, + { url = "https://files.pythonhosted.org/packages/61/cb/c585282a03a0cea70fcaa1bf55d5d702d0f2351094d663ec3be1c6c67c52/google_crc32c-1.7.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e10554d4abc5238823112c2ad7e4560f96c7bf3820b202660373d769d9e6e4c9", size = 32870, upload-time = "2025-03-26T14:41:37.08Z" }, + { url = "https://files.pythonhosted.org/packages/16/1b/1693372bf423ada422f80fd88260dbfd140754adb15cbc4d7e9a68b1cb8e/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85fef7fae11494e747c9fd1359a527e5970fc9603c90764843caabd3a16a0a48", size = 28241, upload-time = "2025-03-26T14:41:45.898Z" }, + { url = "https://files.pythonhosted.org/packages/fd/3c/2a19a60a473de48717b4efb19398c3f914795b64a96cf3fbe82588044f78/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6efb97eb4369d52593ad6f75e7e10d053cf00c48983f7a973105bc70b0ac4d82", size = 28048, upload-time = "2025-03-26T14:41:46.696Z" }, +] + +[[package]] +name = "google-genai" +version = "1.47.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "google-auth" }, + { name = "httpx" }, + { name = "pydantic" }, + { name = "requests" }, + { name = "tenacity" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/97/784fba9bc6c41263ff90cb9063eadfdd755dde79cfa5a8d0e397b067dcf9/google_genai-1.47.0.tar.gz", hash = "sha256:ecece00d0a04e6739ea76cc8dad82ec9593d9380aaabef078990e60574e5bf59", size = 241471, upload-time = "2025-10-29T22:01:02.88Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/ef/e080e8d67c270ea320956bb911a9359664fc46d3b87d1f029decd33e5c4c/google_genai-1.47.0-py3-none-any.whl", hash = "sha256:e3851237556cbdec96007d8028b4b1f2425cdc5c099a8dc36b72a57e42821b60", size = 241506, upload-time = "2025-10-29T22:01:00.982Z" }, +] + +[[package]] +name = "google-resumable-media" +version = "2.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-crc32c" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/5a/0efdc02665dca14e0837b62c8a1a93132c264bd02054a15abb2218afe0ae/google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0", size = 2163099, upload-time = "2024-08-07T22:20:38.555Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/82/35/b8d3baf8c46695858cb9d8835a53baa1eeb9906ddaf2f728a5f5b640fd1e/google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa", size = 81251, upload-time = "2024-08-07T22:20:36.409Z" }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.70.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/24/33db22342cf4a2ea27c9955e6713140fedd51e8b141b5ce5260897020f1a/googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257", size = 145903, upload-time = "2025-04-14T10:17:02.924Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/f1/62a193f0227cf15a920390abe675f386dec35f7ae3ffe6da582d3ade42c7/googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8", size = 294530, upload-time = "2025-04-14T10:17:01.271Z" }, +] + +[package.optional-dependencies] +grpc = [ + { name = "grpcio" }, +] + +[[package]] +name = "greenlet" +version = "3.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260, upload-time = "2025-08-07T13:24:33.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, + { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/95d48d7e3d433e6dae5b1682e4292242a53f22df82e6d3dda81b1701a960/greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3", size = 644646, upload-time = "2025-08-07T13:45:26.523Z" }, + { url = "https://files.pythonhosted.org/packages/d5/5e/405965351aef8c76b8ef7ad370e5da58d57ef6068df197548b015464001a/greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633", size = 640519, upload-time = "2025-08-07T13:53:13.928Z" }, + { url = "https://files.pythonhosted.org/packages/25/5d/382753b52006ce0218297ec1b628e048c4e64b155379331f25a7316eb749/greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079", size = 639707, upload-time = "2025-08-07T13:18:27.146Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = 
"2025-08-07T13:18:25.164Z" }, + { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, + { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c", size = 1548385, upload-time = "2025-11-04T12:42:11.067Z" }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5", size = 1613329, upload-time = "2025-11-04T12:42:12.928Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" }, + { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, + { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, + { url = "https://files.pythonhosted.org/packages/3b/16/035dcfcc48715ccd345f3a93183267167cdd162ad123cd93067d86f27ce4/greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968", size = 655185, upload-time = "2025-08-07T13:45:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/31/da/0386695eef69ffae1ad726881571dfe28b41970173947e7c558d9998de0f/greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9", size = 649926, upload-time = "2025-08-07T13:53:15.251Z" }, + { url = "https://files.pythonhosted.org/packages/68/88/69bf19fd4dc19981928ceacbc5fd4bb6bc2215d53199e367832e98d1d8fe/greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6", size = 651839, upload-time = "2025-08-07T13:18:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = 
"2025-08-07T13:18:28.544Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" }, + { url = "https://files.pythonhosted.org/packages/27/45/80935968b53cfd3f33cf99ea5f08227f2646e044568c9b1555b58ffd61c2/greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0", size = 1564846, upload-time = "2025-11-04T12:42:15.191Z" }, + { url = "https://files.pythonhosted.org/packages/69/02/b7c30e5e04752cb4db6202a3858b149c0710e5453b71a3b2aec5d78a1aab/greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d", size = 1633814, upload-time = "2025-11-04T12:42:17.175Z" }, + { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" }, + { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" }, + { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" }, + { url = "https://files.pythonhosted.org/packages/f7/0b/bc13f787394920b23073ca3b6c4a7a21396301ed75a655bcb47196b50e6e/greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc", size = 655191, upload-time = "2025-08-07T13:45:29.752Z" }, + { url = "https://files.pythonhosted.org/packages/f2/d6/6adde57d1345a8d0f14d31e4ab9c23cfe8e2cd39c3baf7674b4b0338d266/greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a", size = 649516, upload-time = "2025-08-07T13:53:16.314Z" }, + { url = "https://files.pythonhosted.org/packages/7f/3b/3a3328a788d4a473889a2d403199932be55b1b0060f4ddd96ee7cdfcad10/greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504", size = 652169, upload-time = "2025-08-07T13:18:32.861Z" }, + { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = 
"2025-08-07T13:18:31.636Z" }, + { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" }, + { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" }, + { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b", size = 1564759, upload-time = "2025-11-04T12:42:19.395Z" }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929", size = 1634288, upload-time = "2025-11-04T12:42:21.174Z" }, + { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" }, + { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0", size = 273586, upload-time = "2025-08-07T13:16:08.004Z" }, + { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f", size = 686346, upload-time = "2025-08-07T13:42:59.944Z" }, + { url = "https://files.pythonhosted.org/packages/c0/aa/687d6b12ffb505a4447567d1f3abea23bd20e73a5bed63871178e0831b7a/greenlet-3.2.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c17b6b34111ea72fc5a4e4beec9711d2226285f0386ea83477cbb97c30a3f3a5", size = 699218, upload-time = "2025-08-07T13:45:30.969Z" }, + { url = "https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1", size = 694659, upload-time = "2025-08-07T13:53:17.759Z" }, + { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735", size = 695355, upload-time = "2025-08-07T13:18:34.517Z" }, + { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337", size = 657512, upload-time = 
"2025-08-07T13:18:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/23/6e/74407aed965a4ab6ddd93a7ded3180b730d281c77b765788419484cdfeef/greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269", size = 1612508, upload-time = "2025-11-04T12:42:23.427Z" }, + { url = "https://files.pythonhosted.org/packages/0d/da/343cd760ab2f92bac1845ca07ee3faea9fe52bee65f7bcb19f16ad7de08b/greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681", size = 1680760, upload-time = "2025-11-04T12:42:25.341Z" }, + { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01", size = 303425, upload-time = "2025-08-07T13:32:27.59Z" }, +] + +[[package]] +name = "groq" +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/b1/653567a92d876e3e52cdce6780ac3f6dfec5101b6c81a577e3c5abddeebe/groq-0.23.1.tar.gz", hash = "sha256:952e34895f9bfb78ab479e495d77b32180262e5c42f531ce3a1722d6e5a04dfb", size = 125359, upload-time = "2025-04-24T18:59:32.562Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/9a/54948664261f707a24377ee7e280dbca52b7265fce8613c52dae0cbf5cf5/groq-0.23.1-py3-none-any.whl", hash = "sha256:05fa38c3d0ad03c19c6185f98f6a73901c2a463e844fd067b79f7b05c8346946", size = 127351, upload-time = "2025-04-24T18:59:30.809Z" }, +] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.14.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos", extra = ["grpc"] }, + { name = "grpcio" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/4e/8d0ca3b035e41fe0b3f31ebbb638356af720335e5a11154c330169b40777/grpc_google_iam_v1-0.14.2.tar.gz", hash = "sha256:b3e1fc387a1a329e41672197d0ace9de22c78dd7d215048c4c78712073f7bd20", size = 16259, upload-time = "2025-03-17T11:40:23.586Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/66/6f/dd9b178aee7835b96c2e63715aba6516a9d50f6bebbd1cc1d32c82a2a6c3/grpc_google_iam_v1-0.14.2-py3-none-any.whl", hash = "sha256:a3171468459770907926d56a440b2bb643eec1d7ba215f48f3ecece42b4d8351", size = 19242, upload-time = "2025-03-17T11:40:22.648Z" }, +] + +[[package]] +name = "grpcio" +version = "1.75.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/f7/8963848164c7604efb3a3e6ee457fdb3a469653e19002bd24742473254f8/grpcio-1.75.1.tar.gz", hash = "sha256:3e81d89ece99b9ace23a6916880baca613c03a799925afb2857887efa8b1b3d2", size = 12731327, upload-time = "2025-09-26T09:03:36.887Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/3c/35ca9747473a306bfad0cee04504953f7098527cd112a4ab55c55af9e7bd/grpcio-1.75.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:573855ca2e58e35032aff30bfbd1ee103fbcf4472e4b28d4010757700918e326", size = 5709761, upload-time = "2025-09-26T09:01:28.528Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/2c/ecbcb4241e4edbe85ac2663f885726fea0e947767401288b50d8fdcb9200/grpcio-1.75.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:6a4996a2c8accc37976dc142d5991adf60733e223e5c9a2219e157dc6a8fd3a2", size = 11496691, upload-time = "2025-09-26T09:01:31.214Z" }, + { url = "https://files.pythonhosted.org/packages/81/40/bc07aee2911f0d426fa53fe636216100c31a8ea65a400894f280274cb023/grpcio-1.75.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b1ea1bbe77ecbc1be00af2769f4ae4a88ce93be57a4f3eebd91087898ed749f9", size = 6296084, upload-time = "2025-09-26T09:01:34.596Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d1/10c067f6c67396cbf46448b80f27583b5e8c4b46cdfbe18a2a02c2c2f290/grpcio-1.75.1-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:e5b425aee54cc5e3e3c58f00731e8a33f5567965d478d516d35ef99fd648ab68", size = 6950403, upload-time = "2025-09-26T09:01:36.736Z" }, + { url = "https://files.pythonhosted.org/packages/3f/42/5f628abe360b84dfe8dd8f32be6b0606dc31dc04d3358eef27db791ea4d5/grpcio-1.75.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0049a7bf547dafaeeb1db17079ce79596c298bfe308fc084d023c8907a845b9a", size = 6470166, upload-time = "2025-09-26T09:01:39.474Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/a24035080251324019882ee2265cfde642d6476c0cf8eb207fc693fcebdc/grpcio-1.75.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b8ea230c7f77c0a1a3208a04a1eda164633fb0767b4cefd65a01079b65e5b1f", size = 7107828, upload-time = "2025-09-26T09:01:41.782Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f8/d18b984c1c9ba0318e3628dbbeb6af77a5007f02abc378c845070f2d3edd/grpcio-1.75.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:36990d629c3c9fb41e546414e5af52d0a7af37ce7113d9682c46d7e2919e4cca", size = 8045421, upload-time = "2025-09-26T09:01:45.835Z" }, + { url = "https://files.pythonhosted.org/packages/7e/b6/4bf9aacff45deca5eac5562547ed212556b831064da77971a4e632917da3/grpcio-1.75.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b10ad908118d38c2453ade7ff790e5bce36580c3742919007a2a78e3a1e521ca", size = 7503290, upload-time = "2025-09-26T09:01:49.28Z" }, + { url = "https://files.pythonhosted.org/packages/3b/15/d8d69d10223cb54c887a2180bd29fe5fa2aec1d4995c8821f7aa6eaf72e4/grpcio-1.75.1-cp311-cp311-win32.whl", hash = "sha256:d6be2b5ee7bea656c954dcf6aa8093c6f0e6a3ef9945c99d99fcbfc88c5c0bfe", size = 3950631, upload-time = "2025-09-26T09:01:51.23Z" }, + { url = "https://files.pythonhosted.org/packages/8a/40/7b8642d45fff6f83300c24eaac0380a840e5e7fe0e8d80afd31b99d7134e/grpcio-1.75.1-cp311-cp311-win_amd64.whl", hash = "sha256:61c692fb05956b17dd6d1ab480f7f10ad0536dba3bc8fd4e3c7263dc244ed772", size = 4646131, upload-time = "2025-09-26T09:01:53.266Z" }, + { url = "https://files.pythonhosted.org/packages/3a/81/42be79e73a50aaa20af66731c2defeb0e8c9008d9935a64dd8ea8e8c44eb/grpcio-1.75.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:7b888b33cd14085d86176b1628ad2fcbff94cfbbe7809465097aa0132e58b018", size = 5668314, upload-time = "2025-09-26T09:01:55.424Z" }, + { url = "https://files.pythonhosted.org/packages/c5/a7/3686ed15822fedc58c22f82b3a7403d9faf38d7c33de46d4de6f06e49426/grpcio-1.75.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8775036efe4ad2085975531d221535329f5dac99b6c2a854a995456098f99546", size = 11476125, upload-time = "2025-09-26T09:01:57.927Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/85/21c71d674f03345ab183c634ecd889d3330177e27baea8d5d247a89b6442/grpcio-1.75.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb658f703468d7fbb5dcc4037c65391b7dc34f808ac46ed9136c24fc5eeb041d", size = 6246335, upload-time = "2025-09-26T09:02:00.76Z" }, + { url = "https://files.pythonhosted.org/packages/fd/db/3beb661bc56a385ae4fa6b0e70f6b91ac99d47afb726fe76aaff87ebb116/grpcio-1.75.1-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4b7177a1cdb3c51b02b0c0a256b0a72fdab719600a693e0e9037949efffb200b", size = 6916309, upload-time = "2025-09-26T09:02:02.894Z" }, + { url = "https://files.pythonhosted.org/packages/1e/9c/eda9fe57f2b84343d44c1b66cf3831c973ba29b078b16a27d4587a1fdd47/grpcio-1.75.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7d4fa6ccc3ec2e68a04f7b883d354d7fea22a34c44ce535a2f0c0049cf626ddf", size = 6435419, upload-time = "2025-09-26T09:02:05.055Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b8/090c98983e0a9d602e3f919a6e2d4e470a8b489452905f9a0fa472cac059/grpcio-1.75.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d86880ecaeb5b2f0a8afa63824de93adb8ebe4e49d0e51442532f4e08add7d6", size = 7064893, upload-time = "2025-09-26T09:02:07.275Z" }, + { url = "https://files.pythonhosted.org/packages/ec/c0/6d53d4dbbd00f8bd81571f5478d8a95528b716e0eddb4217cc7cb45aae5f/grpcio-1.75.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a8041d2f9e8a742aeae96f4b047ee44e73619f4f9d24565e84d5446c623673b6", size = 8011922, upload-time = "2025-09-26T09:02:09.527Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7c/48455b2d0c5949678d6982c3e31ea4d89df4e16131b03f7d5c590811cbe9/grpcio-1.75.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3652516048bf4c314ce12be37423c79829f46efffb390ad64149a10c6071e8de", size = 7466181, upload-time = "2025-09-26T09:02:12.279Z" }, + { url = "https://files.pythonhosted.org/packages/fd/12/04a0e79081e3170b6124f8cba9b6275871276be06c156ef981033f691880/grpcio-1.75.1-cp312-cp312-win32.whl", hash = "sha256:44b62345d8403975513af88da2f3d5cc76f73ca538ba46596f92a127c2aea945", size = 3938543, upload-time = "2025-09-26T09:02:14.77Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d7/11350d9d7fb5adc73d2b0ebf6ac1cc70135577701e607407fe6739a90021/grpcio-1.75.1-cp312-cp312-win_amd64.whl", hash = "sha256:b1e191c5c465fa777d4cafbaacf0c01e0d5278022082c0abbd2ee1d6454ed94d", size = 4641938, upload-time = "2025-09-26T09:02:16.927Z" }, + { url = "https://files.pythonhosted.org/packages/46/74/bac4ab9f7722164afdf263ae31ba97b8174c667153510322a5eba4194c32/grpcio-1.75.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:3bed22e750d91d53d9e31e0af35a7b0b51367e974e14a4ff229db5b207647884", size = 5672779, upload-time = "2025-09-26T09:02:19.11Z" }, + { url = "https://files.pythonhosted.org/packages/a6/52/d0483cfa667cddaa294e3ab88fd2c2a6e9dc1a1928c0e5911e2e54bd5b50/grpcio-1.75.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:5b8f381eadcd6ecaa143a21e9e80a26424c76a0a9b3d546febe6648f3a36a5ac", size = 11470623, upload-time = "2025-09-26T09:02:22.117Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e4/d1954dce2972e32384db6a30273275e8c8ea5a44b80347f9055589333b3f/grpcio-1.75.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5bf4001d3293e3414d0cf99ff9b1139106e57c3a66dfff0c5f60b2a6286ec133", size = 6248838, upload-time = "2025-09-26T09:02:26.426Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/43/073363bf63826ba8077c335d797a8d026f129dc0912b69c42feaf8f0cd26/grpcio-1.75.1-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f82ff474103e26351dacfe8d50214e7c9322960d8d07ba7fa1d05ff981c8b2d", size = 6922663, upload-time = "2025-09-26T09:02:28.724Z" }, + { url = "https://files.pythonhosted.org/packages/c2/6f/076ac0df6c359117676cacfa8a377e2abcecec6a6599a15a672d331f6680/grpcio-1.75.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ee119f4f88d9f75414217823d21d75bfe0e6ed40135b0cbbfc6376bc9f7757d", size = 6436149, upload-time = "2025-09-26T09:02:30.971Z" }, + { url = "https://files.pythonhosted.org/packages/6b/27/1d08824f1d573fcb1fa35ede40d6020e68a04391709939e1c6f4193b445f/grpcio-1.75.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:664eecc3abe6d916fa6cf8dd6b778e62fb264a70f3430a3180995bf2da935446", size = 7067989, upload-time = "2025-09-26T09:02:33.233Z" }, + { url = "https://files.pythonhosted.org/packages/c6/98/98594cf97b8713feb06a8cb04eeef60b4757e3e2fb91aa0d9161da769843/grpcio-1.75.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c32193fa08b2fbebf08fe08e84f8a0aad32d87c3ad42999c65e9449871b1c66e", size = 8010717, upload-time = "2025-09-26T09:02:36.011Z" }, + { url = "https://files.pythonhosted.org/packages/8c/7e/bb80b1bba03c12158f9254762cdf5cced4a9bc2e8ed51ed335915a5a06ef/grpcio-1.75.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5cebe13088b9254f6e615bcf1da9131d46cfa4e88039454aca9cb65f639bd3bc", size = 7463822, upload-time = "2025-09-26T09:02:38.26Z" }, + { url = "https://files.pythonhosted.org/packages/23/1c/1ea57fdc06927eb5640f6750c697f596f26183573069189eeaf6ef86ba2d/grpcio-1.75.1-cp313-cp313-win32.whl", hash = "sha256:4b4c678e7ed50f8ae8b8dbad15a865ee73ce12668b6aaf411bf3258b5bc3f970", size = 3938490, upload-time = "2025-09-26T09:02:40.268Z" }, + { url = "https://files.pythonhosted.org/packages/4b/24/fbb8ff1ccadfbf78ad2401c41aceaf02b0d782c084530d8871ddd69a2d49/grpcio-1.75.1-cp313-cp313-win_amd64.whl", hash = "sha256:5573f51e3f296a1bcf71e7a690c092845fb223072120f4bdb7a5b48e111def66", size = 4642538, upload-time = "2025-09-26T09:02:42.519Z" }, + { url = "https://files.pythonhosted.org/packages/f2/1b/9a0a5cecd24302b9fdbcd55d15ed6267e5f3d5b898ff9ac8cbe17ee76129/grpcio-1.75.1-cp314-cp314-linux_armv7l.whl", hash = "sha256:c05da79068dd96723793bffc8d0e64c45f316248417515f28d22204d9dae51c7", size = 5673319, upload-time = "2025-09-26T09:02:44.742Z" }, + { url = "https://files.pythonhosted.org/packages/c6/ec/9d6959429a83fbf5df8549c591a8a52bb313976f6646b79852c4884e3225/grpcio-1.75.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:06373a94fd16ec287116a825161dca179a0402d0c60674ceeec8c9fba344fe66", size = 11480347, upload-time = "2025-09-26T09:02:47.539Z" }, + { url = "https://files.pythonhosted.org/packages/09/7a/26da709e42c4565c3d7bf999a9569da96243ce34a8271a968dee810a7cf1/grpcio-1.75.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4484f4b7287bdaa7a5b3980f3c7224c3c622669405d20f69549f5fb956ad0421", size = 6254706, upload-time = "2025-09-26T09:02:50.4Z" }, + { url = "https://files.pythonhosted.org/packages/f1/08/dcb26a319d3725f199c97e671d904d84ee5680de57d74c566a991cfab632/grpcio-1.75.1-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:2720c239c1180eee69f7883c1d4c83fc1a495a2535b5fa322887c70bf02b16e8", size = 6922501, upload-time = "2025-09-26T09:02:52.711Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/66/044d412c98408a5e23cb348845979a2d17a2e2b6c3c34c1ec91b920f49d0/grpcio-1.75.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:07a554fa31c668cf0e7a188678ceeca3cb8fead29bbe455352e712ec33ca701c", size = 6437492, upload-time = "2025-09-26T09:02:55.542Z" }, + { url = "https://files.pythonhosted.org/packages/4e/9d/5e3e362815152aa1afd8b26ea613effa005962f9da0eec6e0e4527e7a7d1/grpcio-1.75.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3e71a2105210366bfc398eef7f57a664df99194f3520edb88b9c3a7e46ee0d64", size = 7081061, upload-time = "2025-09-26T09:02:58.261Z" }, + { url = "https://files.pythonhosted.org/packages/1e/1a/46615682a19e100f46e31ddba9ebc297c5a5ab9ddb47b35443ffadb8776c/grpcio-1.75.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8679aa8a5b67976776d3c6b0521e99d1c34db8a312a12bcfd78a7085cb9b604e", size = 8010849, upload-time = "2025-09-26T09:03:00.548Z" }, + { url = "https://files.pythonhosted.org/packages/67/8e/3204b94ac30b0f675ab1c06540ab5578660dc8b690db71854d3116f20d00/grpcio-1.75.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:aad1c774f4ebf0696a7f148a56d39a3432550612597331792528895258966dc0", size = 7464478, upload-time = "2025-09-26T09:03:03.096Z" }, + { url = "https://files.pythonhosted.org/packages/b7/97/2d90652b213863b2cf466d9c1260ca7e7b67a16780431b3eb1d0420e3d5b/grpcio-1.75.1-cp314-cp314-win32.whl", hash = "sha256:62ce42d9994446b307649cb2a23335fa8e927f7ab2cbf5fcb844d6acb4d85f9c", size = 4012672, upload-time = "2025-09-26T09:03:05.477Z" }, + { url = "https://files.pythonhosted.org/packages/f9/df/e2e6e9fc1c985cd1a59e6996a05647c720fe8a03b92f5ec2d60d366c531e/grpcio-1.75.1-cp314-cp314-win_amd64.whl", hash = "sha256:f86e92275710bea3000cb79feca1762dc0ad3b27830dd1a74e82ab321d4ee464", size = 4772475, upload-time = "2025-09-26T09:03:07.661Z" }, +] + +[[package]] +name = "grpcio-status" +version = "1.71.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/d1/b6e9877fedae3add1afdeae1f89d1927d296da9cf977eca0eb08fb8a460e/grpcio_status-1.71.2.tar.gz", hash = "sha256:c7a97e176df71cdc2c179cd1847d7fc86cca5832ad12e9798d7fed6b7a1aab50", size = 13677, upload-time = "2025-06-28T04:24:05.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/67/58/317b0134129b556a93a3b0afe00ee675b5657f0155509e22fcb853bafe2d/grpcio_status-1.71.2-py3-none-any.whl", hash = "sha256:803c98cb6a8b7dc6dbb785b1111aed739f241ab5e9da0bba96888aa74704cfd3", size = 14424, upload-time = "2025-06-28T04:23:42.136Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "h2" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "hpack" }, + { name = "hyperframe" }, +] 
+sdist = { url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = "sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026, upload-time = "2025-08-23T18:12:19.778Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779, upload-time = "2025-08-23T18:12:17.779Z" }, +] + +[[package]] +name = "hf-xet" +version = "1.1.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/74/31/feeddfce1748c4a233ec1aa5b7396161c07ae1aa9b7bdbc9a72c3c7dd768/hf_xet-1.1.10.tar.gz", hash = "sha256:408aef343800a2102374a883f283ff29068055c111f003ff840733d3b715bb97", size = 487910, upload-time = "2025-09-12T20:10:27.12Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/a2/343e6d05de96908366bdc0081f2d8607d61200be2ac802769c4284cc65bd/hf_xet-1.1.10-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:686083aca1a6669bc85c21c0563551cbcdaa5cf7876a91f3d074a030b577231d", size = 2761466, upload-time = "2025-09-12T20:10:22.836Z" }, + { url = "https://files.pythonhosted.org/packages/31/f9/6215f948ac8f17566ee27af6430ea72045e0418ce757260248b483f4183b/hf_xet-1.1.10-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:71081925383b66b24eedff3013f8e6bbd41215c3338be4b94ba75fd75b21513b", size = 2623807, upload-time = "2025-09-12T20:10:21.118Z" }, + { url = "https://files.pythonhosted.org/packages/15/07/86397573efefff941e100367bbda0b21496ffcdb34db7ab51912994c32a2/hf_xet-1.1.10-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6bceb6361c80c1cc42b5a7b4e3efd90e64630bcf11224dcac50ef30a47e435", size = 3186960, upload-time = "2025-09-12T20:10:19.336Z" }, + { url = "https://files.pythonhosted.org/packages/01/a7/0b2e242b918cc30e1f91980f3c4b026ff2eedaf1e2ad96933bca164b2869/hf_xet-1.1.10-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eae7c1fc8a664e54753ffc235e11427ca61f4b0477d757cc4eb9ae374b69f09c", size = 3087167, upload-time = "2025-09-12T20:10:17.255Z" }, + { url = "https://files.pythonhosted.org/packages/4a/25/3e32ab61cc7145b11eee9d745988e2f0f4fafda81b25980eebf97d8cff15/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0a0005fd08f002180f7a12d4e13b22be277725bc23ed0529f8add5c7a6309c06", size = 3248612, upload-time = "2025-09-12T20:10:24.093Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3d/ab7109e607ed321afaa690f557a9ada6d6d164ec852fd6bf9979665dc3d6/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f900481cf6e362a6c549c61ff77468bd59d6dd082f3170a36acfef2eb6a6793f", size = 3353360, upload-time = "2025-09-12T20:10:25.563Z" }, + { url = "https://files.pythonhosted.org/packages/ee/0e/471f0a21db36e71a2f1752767ad77e92d8cde24e974e03d662931b1305ec/hf_xet-1.1.10-cp37-abi3-win_amd64.whl", hash = "sha256:5f54b19cc347c13235ae7ee98b330c26dd65ef1df47e5316ffb1e87713ca7045", size = 2804691, upload-time = "2025-09-12T20:10:28.433Z" }, +] + +[[package]] +name = "hpack" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276, upload-time = 
"2025-01-22T21:44:58.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357, upload-time = "2025-01-22T21:44:56.92Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httplib2" +version = "0.31.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyparsing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/77/6653db69c1f7ecfe5e3f9726fdadc981794656fcd7d98c4209fecfea9993/httplib2-0.31.0.tar.gz", hash = "sha256:ac7ab497c50975147d4f7b1ade44becc7df2f8954d42b38b3d69c515f531135c", size = 250759, upload-time = "2025-09-11T12:16:03.403Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/a2/0d269db0f6163be503775dc8b6a6fa15820cc9fdc866f6ba608d86b721f2/httplib2-0.31.0-py3-none-any.whl", hash = "sha256:b9cd78abea9b4e43a7714c6e0f8b6b8561a6fc1e95d5dbd367f5bf0ef35f5d24", size = 91148, upload-time = "2025-09-11T12:16:01.803Z" }, +] + +[[package]] +name = "httptools" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521, upload-time = "2025-10-10T03:54:31.002Z" }, + { url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375, upload-time = "2025-10-10T03:54:31.941Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621, upload-time = "2025-10-10T03:54:33.176Z" }, + { url = "https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954, 
upload-time = "2025-10-10T03:54:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175, upload-time = "2025-10-10T03:54:35.942Z" }, + { url = "https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310, upload-time = "2025-10-10T03:54:37.1Z" }, + { url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875, upload-time = "2025-10-10T03:54:38.421Z" }, + { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" }, + { url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" }, + { url = "https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 511440, upload-time = "2025-10-10T03:54:42.452Z" }, + { url = "https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186, upload-time = "2025-10-10T03:54:43.937Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192, upload-time = "2025-10-10T03:54:45.003Z" }, + { url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694, upload-time = "2025-10-10T03:54:45.923Z" }, + { url = "https://files.pythonhosted.org/packages/09/8f/c77b1fcbfd262d422f12da02feb0d218fa228d52485b77b953832105bb90/httptools-0.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6babce6cfa2a99545c60bfef8bee0cc0545413cb0018f617c8059a30ad985de3", size = 202889, upload-time = 
"2025-10-10T03:54:47.089Z" }, + { url = "https://files.pythonhosted.org/packages/0a/1a/22887f53602feaa066354867bc49a68fc295c2293433177ee90870a7d517/httptools-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:601b7628de7504077dd3dcb3791c6b8694bbd967148a6d1f01806509254fb1ca", size = 108180, upload-time = "2025-10-10T03:54:48.052Z" }, + { url = "https://files.pythonhosted.org/packages/32/6a/6aaa91937f0010d288d3d124ca2946d48d60c3a5ee7ca62afe870e3ea011/httptools-0.7.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:04c6c0e6c5fb0739c5b8a9eb046d298650a0ff38cf42537fc372b28dc7e4472c", size = 478596, upload-time = "2025-10-10T03:54:48.919Z" }, + { url = "https://files.pythonhosted.org/packages/6d/70/023d7ce117993107be88d2cbca566a7c1323ccbaf0af7eabf2064fe356f6/httptools-0.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69d4f9705c405ae3ee83d6a12283dc9feba8cc6aaec671b412917e644ab4fa66", size = 473268, upload-time = "2025-10-10T03:54:49.993Z" }, + { url = "https://files.pythonhosted.org/packages/32/4d/9dd616c38da088e3f436e9a616e1d0cc66544b8cdac405cc4e81c8679fc7/httptools-0.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44c8f4347d4b31269c8a9205d8a5ee2df5322b09bbbd30f8f862185bb6b05346", size = 455517, upload-time = "2025-10-10T03:54:51.066Z" }, + { url = "https://files.pythonhosted.org/packages/1d/3a/a6c595c310b7df958e739aae88724e24f9246a514d909547778d776799be/httptools-0.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:465275d76db4d554918aba40bf1cbebe324670f3dfc979eaffaa5d108e2ed650", size = 458337, upload-time = "2025-10-10T03:54:52.196Z" }, + { url = "https://files.pythonhosted.org/packages/fd/82/88e8d6d2c51edc1cc391b6e044c6c435b6aebe97b1abc33db1b0b24cd582/httptools-0.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:322d00c2068d125bd570f7bf78b2d367dad02b919d8581d7476d8b75b294e3e6", size = 85743, upload-time = "2025-10-10T03:54:53.448Z" }, + { url = "https://files.pythonhosted.org/packages/34/50/9d095fcbb6de2d523e027a2f304d4551855c2f46e0b82befd718b8b20056/httptools-0.7.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c08fe65728b8d70b6923ce31e3956f859d5e1e8548e6f22ec520a962c6757270", size = 203619, upload-time = "2025-10-10T03:54:54.321Z" }, + { url = "https://files.pythonhosted.org/packages/07/f0/89720dc5139ae54b03f861b5e2c55a37dba9a5da7d51e1e824a1f343627f/httptools-0.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7aea2e3c3953521c3c51106ee11487a910d45586e351202474d45472db7d72d3", size = 108714, upload-time = "2025-10-10T03:54:55.163Z" }, + { url = "https://files.pythonhosted.org/packages/b3/cb/eea88506f191fb552c11787c23f9a405f4c7b0c5799bf73f2249cd4f5228/httptools-0.7.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e68b8582f4ea9166be62926077a3334064d422cf08ab87d8b74664f8e9058e1", size = 472909, upload-time = "2025-10-10T03:54:56.056Z" }, + { url = "https://files.pythonhosted.org/packages/e0/4a/a548bdfae6369c0d078bab5769f7b66f17f1bfaa6fa28f81d6be6959066b/httptools-0.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df091cf961a3be783d6aebae963cc9b71e00d57fa6f149025075217bc6a55a7b", size = 470831, upload-time = "2025-10-10T03:54:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/14df99e1c43bd132eec921c2e7e11cda7852f65619bc0fc5bdc2d0cb126c/httptools-0.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:f084813239e1eb403ddacd06a30de3d3e09a9b76e7894dcda2b22f8a726e9c60", size = 452631, upload-time = "2025-10-10T03:54:58.219Z" }, + { url = "https://files.pythonhosted.org/packages/22/d2/b7e131f7be8d854d48cb6d048113c30f9a46dca0c9a8b08fcb3fcd588cdc/httptools-0.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7347714368fb2b335e9063bc2b96f2f87a9ceffcd9758ac295f8bbcd3ffbc0ca", size = 452910, upload-time = "2025-10-10T03:54:59.366Z" }, + { url = "https://files.pythonhosted.org/packages/53/cf/878f3b91e4e6e011eff6d1fa9ca39f7eb17d19c9d7971b04873734112f30/httptools-0.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:cfabda2a5bb85aa2a904ce06d974a3f30fb36cc63d7feaddec05d2050acede96", size = 88205, upload-time = "2025-10-10T03:55:00.389Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[package.optional-dependencies] +http2 = [ + { name = "h2" }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624, upload-time = "2023-12-22T08:01:21.083Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819, upload-time = "2023-12-22T08:01:19.89Z" }, +] + +[[package]] +name = "huggingface-hub" +version = "0.35.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "fsspec" }, + { name = "hf-xet", marker = "platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/10/7e/a0a97de7c73671863ca6b3f61fa12518caf35db37825e43d63a70956738c/huggingface_hub-0.35.3.tar.gz", hash = "sha256:350932eaa5cc6a4747efae85126ee220e4ef1b54e29d31c3b45c5612ddf0b32a", size = 461798, upload-time = "2025-09-29T14:29:58.625Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/a0/651f93d154cb72323358bf2bbae3e642bdb5d2f1bfc874d096f7cb159fa0/huggingface_hub-0.35.3-py3-none-any.whl", hash = "sha256:0e3a01829c19d86d03793e4577816fe3bdfc1602ac62c7fb220d593d351224ba", size = 564262, upload-time = "2025-09-29T14:29:55.813Z" }, +] + +[[package]] +name = "humanfriendly" +version = "10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"pyreadline3", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702, upload-time = "2021-09-17T21:40:43.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794, upload-time = "2021-09-17T21:40:39.897Z" }, +] + +[[package]] +name = "hyperframe" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566, upload-time = "2025-01-22T21:41:49.302Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" }, +] + +[[package]] +name = "identify" +version = "2.6.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/c4/62963f25a678f6a050fb0505a65e9e726996171e6dbe1547f79619eefb15/identify-2.6.14.tar.gz", hash = "sha256:663494103b4f717cb26921c52f8751363dc89db64364cd836a9bf1535f53cd6a", size = 99283, upload-time = "2025-09-06T19:30:52.938Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/ae/2ad30f4652712c82f1c23423d79136fbce338932ad166d70c1efb86a5998/identify-2.6.14-py2.py3-none-any.whl", hash = "sha256:11a073da82212c6646b1f39bb20d4483bfb9543bd5566fec60053c4bb309bf2e", size = 99172, upload-time = "2025-09-06T19:30:51.759Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "image-search-module" +version = "0.1.0" +source = { editable = "modules/image_search_module" } +dependencies = [ + { name = "common-module" }, + { name = "db-repo-module" }, + { name = "flo-cloud" }, + { name = "numpy" }, + { name = "opencv-python" }, + { name = "pillow" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "scikit-learn" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-mock" }, +] + +[package.metadata] +requires-dist = [ + { name = "common-module", editable = "modules/common_module" }, + { name = "db-repo-module", editable = "modules/db_repo_module" }, + { name = "flo-cloud", editable = "packages/flo_cloud" }, + { name = "numpy", specifier = ">=1.24.0" }, + { 
name = "opencv-python", specifier = ">=4.8.0" }, + { name = "pillow", specifier = ">=10.0.0" }, + { name = "pytest", specifier = ">=8.4.1" }, + { name = "pytest-asyncio", specifier = ">=0.26.0" }, + { name = "scikit-learn", specifier = ">=1.3.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pytest", specifier = ">=8.3.3,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.24.0,<1.0.0" }, + { name = "pytest-mock", specifier = ">=3.12.0" }, +] + +[[package]] +name = "imapclient" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ea/31/883f78210ed7578f6dd41e4dbc3ad5e7c6127a51e56513b8b7bb7efdf9b3/IMAPClient-2.1.0.zip", hash = "sha256:60ba79758cc9f13ec910d7a3df9acaaf2bb6c458720d9a02ec33a41352fd1b99", size = 248423, upload-time = "2018-09-05T11:20:32.002Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/39/e1c2c2c6e2356ab6ea81fcfc0a74b044b311d6a91a45300811d9a6077ef7/IMAPClient-2.1.0-py2.py3-none-any.whl", hash = "sha256:3eeb97b9aa8faab0caa5024d74bfde59408fbd542781246f6960873c7bf0dd01", size = 73972, upload-time = "2018-09-05T11:20:29.064Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cd/12/33e59336dca5be0c398a7482335911a33aa0e20776128f038019f1a95f1b/importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7", size = 55304, upload-time = "2024-09-11T14:56:08.937Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/d9/a1e041c5e7caa9a05c925f4bdbdfb7f006d1f74996af53467bc394c97be7/importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b", size = 26514, upload-time = "2024-09-11T14:56:07.019Z" }, +] + +[[package]] +name = "inference-app" +version = "0.1.0" +source = { editable = "apps/inference_app" } +dependencies = [ + { name = "accelerate" }, + { name = "common-module" }, + { name = "dependency-injector" }, + { name = "fastapi" }, + { name = "flo-cloud" }, + { name = "numpy" }, + { name = "opencv-python" }, + { name = "pillow" }, + { name = "psycopg2" }, + { name = "python-dotenv" }, + { name = "python-multipart" }, + { name = "torchvision" }, + { name = "transformers" }, + { name = "uvicorn" }, +] + +[package.metadata] +requires-dist = [ + { name = "accelerate", specifier = ">=0.33.0,<1.0.0" }, + { name = "common-module", editable = "modules/common_module" }, + { name = "dependency-injector", specifier = ">=4.42.0,<5.0.0" }, + { name = "fastapi", specifier = ">=0.115.2,<1.0.0" }, + { name = "flo-cloud", editable = "packages/flo_cloud" }, + { name = "numpy", specifier = ">=1.26.4,<2.0.0" }, + { name = "opencv-python", specifier = ">=4.11.0.86" }, + { name = "pillow", specifier = ">=11.1.0,<12" }, + { name = "psycopg2", specifier = ">=2.9.10,<3.0.0" }, + { name = "python-dotenv", specifier = ">=1.1.0,<2.0.0" }, + { name = "python-multipart", specifier = "==0.0.9" }, + { name = "torchvision", specifier = "==0.16.0" }, + { name = "transformers", specifier = ">=4.45.0" }, + { name = "uvicorn", specifier = ">=0.30.1,<1.0.0" }, +] + +[[package]] +name = "inference-module" +version = "0.1.0" +source = { editable = "modules/inference_module" } +dependencies = [ + { name = "common-module" }, + { name = "db-repo-module" }, + { name = "flo-cloud" 
}, +] + +[package.dev-dependencies] +dev = [ + { name = "asyncpg" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "testing-postgresql" }, +] + +[package.metadata] +requires-dist = [ + { name = "common-module", editable = "modules/common_module" }, + { name = "db-repo-module", editable = "modules/db_repo_module" }, + { name = "flo-cloud", editable = "packages/flo_cloud" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "asyncpg", specifier = ">=0.30.0,<1.0.0" }, + { name = "pytest", specifier = ">=8.3.3,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.24.0,<1.0.0" }, + { name = "testing-postgresql", specifier = ">=1.3.0,<2.0.0" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "insights-module" +version = "0.0.1" +source = { editable = "modules/insights_module" } +dependencies = [ + { name = "boto3" }, + { name = "common-module" }, + { name = "dacite" }, + { name = "db-repo-module" }, + { name = "dependency-injector" }, + { name = "fastapi" }, + { name = "google-cloud-bigquery" }, + { name = "google-cloud-storage" }, + { name = "httpx" }, + { name = "psycopg2" }, + { name = "pyyaml" }, + { name = "redshift-connector" }, +] + +[package.dev-dependencies] +dev = [ + { name = "asyncpg" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "testing-postgresql" }, +] + +[package.metadata] +requires-dist = [ + { name = "boto3", specifier = "<=1.38.40" }, + { name = "common-module", editable = "modules/common_module" }, + { name = "dacite", specifier = ">=1.9.2,<2.0.0" }, + { name = "db-repo-module", editable = "modules/db_repo_module" }, + { name = "dependency-injector", specifier = ">=4.42.0,<5.0.0" }, + { name = "fastapi", specifier = ">=0.115.2,<1.0.0" }, + { name = "google-cloud-bigquery", specifier = "==3.34.0" }, + { name = "google-cloud-storage", specifier = "<3.0.0" }, + { name = "httpx", specifier = ">=0.28.1,<1.0.0" }, + { name = "psycopg2", specifier = ">=2.9.10,<3.0.0" }, + { name = "pyyaml", specifier = ">=6.0.3,<7" }, + { name = "redshift-connector", specifier = ">=2.1.5,<3.0.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "asyncpg", specifier = ">=0.30.0,<1.0.0" }, + { name = "pytest", specifier = ">=8.3.4,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.24.0,<1.0.0" }, + { name = "testing-postgresql", specifier = ">=1.3.0,<2.0.0" }, +] + +[[package]] +name = "iterators" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/c4/135b5bdb9f14f728fe1274361b336f77c5f1606af9a5622a765fe75f5fa0/iterators-0.2.0.tar.gz", hash = "sha256:e9927a1ea1ef081830fd1512f3916857c36bd4b37272819a6cd29d0f44431b97", size = 4284, upload-time = "2023-01-23T16:07:02.46Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/bd/a1/9c29772ac9f3bdf9837c92ba5c1fc93f75da14c2e0c3fc41e10485f68feb/iterators-0.2.0-py3-none-any.whl", hash = "sha256:1d7ff03f576c9de0e01bac66209556c066d6b1fc45583a99cfc9f4645be7900e", size = 5022, upload-time = "2023-01-23T16:07:00.352Z" }, +] + +[[package]] +name = "itsdangerous" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "jiter" +version = "0.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/c0/a3bb4cc13aced219dd18191ea66e874266bd8aa7b96744e495e1c733aa2d/jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4", size = 167094, upload-time = "2025-09-15T09:20:38.212Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/55/a69fefeef09c2eaabae44b935a1aa81517e49639c0a0c25d861cb18cd7ac/jiter-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cb5d9db02979c3f49071fce51a48f4b4e4cf574175fb2b11c7a535fa4867b222", size = 309503, upload-time = "2025-09-15T09:19:08.191Z" }, + { url = "https://files.pythonhosted.org/packages/bd/d5/a6aba9e6551f32f9c127184f398208e4eddb96c59ac065c8a92056089d28/jiter-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1dc6a123f3471c4730db7ca8ba75f1bb3dcb6faeb8d46dd781083e7dee88b32d", size = 317688, upload-time = "2025-09-15T09:19:09.918Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f3/5e86f57c1883971cdc8535d0429c2787bf734840a231da30a3be12850562/jiter-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09858f8d230f031c7b8e557429102bf050eea29c77ad9c34c8fe253c5329acb7", size = 337418, upload-time = "2025-09-15T09:19:11.078Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4f/a71d8a24c2a70664970574a8e0b766663f5ef788f7fe1cc20ee0c016d488/jiter-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbe2196c4a0ce760925a74ab4456bf644748ab0979762139626ad138f6dac72d", size = 361423, upload-time = "2025-09-15T09:19:13.286Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/e5/b09076f4e7fd9471b91e16f9f3dc7330b161b738f3b39b2c37054a36e26a/jiter-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5beb56d22b63647bafd0b74979216fdee80c580c0c63410be8c11053860ffd09", size = 486367, upload-time = "2025-09-15T09:19:14.546Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f1/98cb3a36f5e62f80cd860f0179f948d9eab5a316d55d3e1bab98d9767af5/jiter-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97025d09ef549795d8dc720a824312cee3253c890ac73c621721ddfc75066789", size = 376335, upload-time = "2025-09-15T09:19:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d8/ec74886497ea393c29dbd7651ddecc1899e86404a6b1f84a3ddab0ab59fd/jiter-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50880a6da65d8c23a2cf53c412847d9757e74cc9a3b95c5704a1d1a24667347", size = 348981, upload-time = "2025-09-15T09:19:17.568Z" }, + { url = "https://files.pythonhosted.org/packages/24/93/d22ad7fa3b86ade66c86153ceea73094fc2af8b20c59cb7fceab9fea4704/jiter-0.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:452d80a1c86c095a242007bd9fc5d21b8a8442307193378f891cb8727e469648", size = 385797, upload-time = "2025-09-15T09:19:19.121Z" }, + { url = "https://files.pythonhosted.org/packages/c8/bd/e25ff4a4df226e9b885f7cb01ee4b9dc74e3000e612d6f723860d71a1f34/jiter-0.11.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e84e58198d4894668eec2da660ffff60e0f3e60afa790ecc50cb12b0e02ca1d4", size = 516597, upload-time = "2025-09-15T09:19:20.301Z" }, + { url = "https://files.pythonhosted.org/packages/be/fb/beda613db7d93ffa2fdd2683f90f2f5dce8daf4bc2d0d2829e7de35308c6/jiter-0.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df64edcfc5dd5279a791eea52aa113d432c933119a025b0b5739f90d2e4e75f1", size = 508853, upload-time = "2025-09-15T09:19:22.075Z" }, + { url = "https://files.pythonhosted.org/packages/20/64/c5b0d93490634e41e38e2a15de5d54fdbd2c9f64a19abb0f95305b63373c/jiter-0.11.0-cp311-cp311-win32.whl", hash = "sha256:144fc21337d21b1d048f7f44bf70881e1586401d405ed3a98c95a114a9994982", size = 205140, upload-time = "2025-09-15T09:19:23.351Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e6/c347c0e6f5796e97d4356b7e5ff0ce336498b7f4ef848fae621a56f1ccf3/jiter-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:b0f32e644d241293b892b1a6dd8f0b9cc029bfd94c97376b2681c36548aabab7", size = 204311, upload-time = "2025-09-15T09:19:24.591Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b5/3009b112b8f673e568ef79af9863d8309a15f0a8cdcc06ed6092051f377e/jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada", size = 305510, upload-time = "2025-09-15T09:19:25.893Z" }, + { url = "https://files.pythonhosted.org/packages/fe/82/15514244e03b9e71e086bbe2a6de3e4616b48f07d5f834200c873956fb8c/jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99", size = 316521, upload-time = "2025-09-15T09:19:27.525Z" }, + { url = "https://files.pythonhosted.org/packages/92/94/7a2e905f40ad2d6d660e00b68d818f9e29fb87ffe82774f06191e93cbe4a/jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6", size = 338214, upload-time = "2025-09-15T09:19:28.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/9c/5791ed5bdc76f12110158d3316a7a3ec0b1413d018b41c5ed399549d3ad5/jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1", size = 361280, upload-time = "2025-09-15T09:19:30.013Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7f/b7d82d77ff0d2cb06424141000176b53a9e6b16a1125525bb51ea4990c2e/jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4", size = 487895, upload-time = "2025-09-15T09:19:31.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/44/10a1475d46f1fc1fd5cc2e82c58e7bca0ce5852208e0fa5df2f949353321/jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72", size = 378421, upload-time = "2025-09-15T09:19:32.746Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5f/0dc34563d8164d31d07bc09d141d3da08157a68dcd1f9b886fa4e917805b/jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591", size = 347932, upload-time = "2025-09-15T09:19:34.612Z" }, + { url = "https://files.pythonhosted.org/packages/f7/de/b68f32a4fcb7b4a682b37c73a0e5dae32180140cd1caf11aef6ad40ddbf2/jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09", size = 386959, upload-time = "2025-09-15T09:19:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/c08c92e713b6e28972a846a81ce374883dac2f78ec6f39a0dad9f2339c3a/jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5", size = 517187, upload-time = "2025-09-15T09:19:37.426Z" }, + { url = "https://files.pythonhosted.org/packages/89/b5/4a283bec43b15aad54fcae18d951f06a2ec3f78db5708d3b59a48e9c3fbd/jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206", size = 509461, upload-time = "2025-09-15T09:19:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/34/a5/f8bad793010534ea73c985caaeef8cc22dfb1fedb15220ecdf15c623c07a/jiter-0.11.0-cp312-cp312-win32.whl", hash = "sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b", size = 206664, upload-time = "2025-09-15T09:19:40.096Z" }, + { url = "https://files.pythonhosted.org/packages/ed/42/5823ec2b1469395a160b4bf5f14326b4a098f3b6898fbd327366789fa5d3/jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c", size = 203520, upload-time = "2025-09-15T09:19:41.798Z" }, + { url = "https://files.pythonhosted.org/packages/97/c4/d530e514d0f4f29b2b68145e7b389cbc7cac7f9c8c23df43b04d3d10fa3e/jiter-0.11.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:4441a91b80a80249f9a6452c14b2c24708f139f64de959943dfeaa6cb915e8eb", size = 305021, upload-time = "2025-09-15T09:19:43.523Z" }, + { url = "https://files.pythonhosted.org/packages/7a/77/796a19c567c5734cbfc736a6f987affc0d5f240af8e12063c0fb93990ffa/jiter-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ff85fc6d2a431251ad82dbd1ea953affb5a60376b62e7d6809c5cd058bb39471", size = 314384, upload-time = "2025-09-15T09:19:44.849Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/9c/824334de0b037b91b6f3fa9fe5a191c83977c7ec4abe17795d3cb6d174cf/jiter-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5e86126d64706fd28dfc46f910d496923c6f95b395138c02d0e252947f452bd", size = 337389, upload-time = "2025-09-15T09:19:46.094Z" }, + { url = "https://files.pythonhosted.org/packages/a2/95/ed4feab69e6cf9b2176ea29d4ef9d01a01db210a3a2c8a31a44ecdc68c38/jiter-0.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad8bd82165961867a10f52010590ce0b7a8c53da5ddd8bbb62fef68c181b921", size = 360519, upload-time = "2025-09-15T09:19:47.494Z" }, + { url = "https://files.pythonhosted.org/packages/b5/0c/2ad00f38d3e583caba3909d95b7da1c3a7cd82c0aa81ff4317a8016fb581/jiter-0.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b42c2cd74273455ce439fd9528db0c6e84b5623cb74572305bdd9f2f2961d3df", size = 487198, upload-time = "2025-09-15T09:19:49.116Z" }, + { url = "https://files.pythonhosted.org/packages/ea/8b/919b64cf3499b79bdfba6036da7b0cac5d62d5c75a28fb45bad7819e22f0/jiter-0.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0062dab98172dd0599fcdbf90214d0dcde070b1ff38a00cc1b90e111f071982", size = 377835, upload-time = "2025-09-15T09:19:50.468Z" }, + { url = "https://files.pythonhosted.org/packages/29/7f/8ebe15b6e0a8026b0d286c083b553779b4dd63db35b43a3f171b544de91d/jiter-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb948402821bc76d1f6ef0f9e19b816f9b09f8577844ba7140f0b6afe994bc64", size = 347655, upload-time = "2025-09-15T09:19:51.726Z" }, + { url = "https://files.pythonhosted.org/packages/8e/64/332127cef7e94ac75719dda07b9a472af6158ba819088d87f17f3226a769/jiter-0.11.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25a5b1110cca7329fd0daf5060faa1234be5c11e988948e4f1a1923b6a457fe1", size = 386135, upload-time = "2025-09-15T09:19:53.075Z" }, + { url = "https://files.pythonhosted.org/packages/20/c8/557b63527442f84c14774159948262a9d4fabb0d61166f11568f22fc60d2/jiter-0.11.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:bf11807e802a214daf6c485037778843fadd3e2ec29377ae17e0706ec1a25758", size = 516063, upload-time = "2025-09-15T09:19:54.447Z" }, + { url = "https://files.pythonhosted.org/packages/86/13/4164c819df4a43cdc8047f9a42880f0ceef5afeb22e8b9675c0528ebdccd/jiter-0.11.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:dbb57da40631c267861dd0090461222060960012d70fd6e4c799b0f62d0ba166", size = 508139, upload-time = "2025-09-15T09:19:55.764Z" }, + { url = "https://files.pythonhosted.org/packages/fa/70/6e06929b401b331d41ddb4afb9f91cd1168218e3371972f0afa51c9f3c31/jiter-0.11.0-cp313-cp313-win32.whl", hash = "sha256:8e36924dad32c48d3c5e188d169e71dc6e84d6cb8dedefea089de5739d1d2f80", size = 206369, upload-time = "2025-09-15T09:19:57.048Z" }, + { url = "https://files.pythonhosted.org/packages/f4/0d/8185b8e15de6dce24f6afae63380e16377dd75686d56007baa4f29723ea1/jiter-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:452d13e4fd59698408087235259cebe67d9d49173b4dacb3e8d35ce4acf385d6", size = 202538, upload-time = "2025-09-15T09:19:58.35Z" }, + { url = "https://files.pythonhosted.org/packages/13/3a/d61707803260d59520721fa326babfae25e9573a88d8b7b9cb54c5423a59/jiter-0.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:089f9df9f69532d1339e83142438668f52c97cd22ee2d1195551c2b1a9e6cf33", size = 313737, upload-time = "2025-09-15T09:19:59.638Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/cc/c9f0eec5d00f2a1da89f6bdfac12b8afdf8d5ad974184863c75060026457/jiter-0.11.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29ed1fe69a8c69bf0f2a962d8d706c7b89b50f1332cd6b9fbda014f60bd03a03", size = 346183, upload-time = "2025-09-15T09:20:01.442Z" }, + { url = "https://files.pythonhosted.org/packages/a6/87/fc632776344e7aabbab05a95a0075476f418c5d29ab0f2eec672b7a1f0ac/jiter-0.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a4d71d7ea6ea8786291423fe209acf6f8d398a0759d03e7f24094acb8ab686ba", size = 204225, upload-time = "2025-09-15T09:20:03.102Z" }, + { url = "https://files.pythonhosted.org/packages/ee/3b/e7f45be7d3969bdf2e3cd4b816a7a1d272507cd0edd2d6dc4b07514f2d9a/jiter-0.11.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:9a6dff27eca70930bdbe4cbb7c1a4ba8526e13b63dc808c0670083d2d51a4a72", size = 304414, upload-time = "2025-09-15T09:20:04.357Z" }, + { url = "https://files.pythonhosted.org/packages/06/32/13e8e0d152631fcc1907ceb4943711471be70496d14888ec6e92034e2caf/jiter-0.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b1ae2a7593a62132c7d4c2abbee80bbbb94fdc6d157e2c6cc966250c564ef774", size = 314223, upload-time = "2025-09-15T09:20:05.631Z" }, + { url = "https://files.pythonhosted.org/packages/0c/7e/abedd5b5a20ca083f778d96bba0d2366567fcecb0e6e34ff42640d5d7a18/jiter-0.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b13a431dba4b059e9e43019d3022346d009baf5066c24dcdea321a303cde9f0", size = 337306, upload-time = "2025-09-15T09:20:06.917Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e2/30d59bdc1204c86aa975ec72c48c482fee6633120ee9c3ab755e4dfefea8/jiter-0.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:af62e84ca3889604ebb645df3b0a3f3bcf6b92babbff642bd214616f57abb93a", size = 360565, upload-time = "2025-09-15T09:20:08.283Z" }, + { url = "https://files.pythonhosted.org/packages/fe/88/567288e0d2ed9fa8f7a3b425fdaf2cb82b998633c24fe0d98f5417321aa8/jiter-0.11.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f3b32bb723246e6b351aecace52aba78adb8eeb4b2391630322dc30ff6c773", size = 486465, upload-time = "2025-09-15T09:20:09.613Z" }, + { url = "https://files.pythonhosted.org/packages/18/6e/7b72d09273214cadd15970e91dd5ed9634bee605176107db21e1e4205eb1/jiter-0.11.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:adcab442f4a099a358a7f562eaa54ed6456fb866e922c6545a717be51dbed7d7", size = 377581, upload-time = "2025-09-15T09:20:10.884Z" }, + { url = "https://files.pythonhosted.org/packages/58/52/4db456319f9d14deed325f70102577492e9d7e87cf7097bda9769a1fcacb/jiter-0.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9967c2ab338ee2b2c0102fd379ec2693c496abf71ffd47e4d791d1f593b68e2", size = 347102, upload-time = "2025-09-15T09:20:12.175Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b4/433d5703c38b26083aec7a733eb5be96f9c6085d0e270a87ca6482cbf049/jiter-0.11.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e7d0bed3b187af8b47a981d9742ddfc1d9b252a7235471ad6078e7e4e5fe75c2", size = 386477, upload-time = "2025-09-15T09:20:13.428Z" }, + { url = "https://files.pythonhosted.org/packages/c8/7a/a60bfd9c55b55b07c5c441c5085f06420b6d493ce9db28d069cc5b45d9f3/jiter-0.11.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:f6fe0283e903ebc55f1a6cc569b8c1f3bf4abd026fed85e3ff8598a9e6f982f0", size = 516004, upload-time = "2025-09-15T09:20:14.848Z" }, + { url 
= "https://files.pythonhosted.org/packages/2e/46/f8363e5ecc179b4ed0ca6cb0a6d3bfc266078578c71ff30642ea2ce2f203/jiter-0.11.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:4ee5821e3d66606b29ae5b497230b304f1376f38137d69e35f8d2bd5f310ff73", size = 507855, upload-time = "2025-09-15T09:20:16.176Z" }, + { url = "https://files.pythonhosted.org/packages/90/33/396083357d51d7ff0f9805852c288af47480d30dd31d8abc74909b020761/jiter-0.11.0-cp314-cp314-win32.whl", hash = "sha256:c2d13ba7567ca8799f17c76ed56b1d49be30df996eb7fa33e46b62800562a5e2", size = 205802, upload-time = "2025-09-15T09:20:17.661Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/eb06ca556b2551d41de7d03bf2ee24285fa3d0c58c5f8d95c64c9c3281b1/jiter-0.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fb4790497369d134a07fc763cc88888c46f734abdd66f9fdf7865038bf3a8f40", size = 313405, upload-time = "2025-09-15T09:20:18.918Z" }, + { url = "https://files.pythonhosted.org/packages/af/22/7ab7b4ec3a1c1f03aef376af11d23b05abcca3fb31fbca1e7557053b1ba2/jiter-0.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e2bbf24f16ba5ad4441a9845e40e4ea0cb9eed00e76ba94050664ef53ef4406", size = 347102, upload-time = "2025-09-15T09:20:20.16Z" }, + { url = "https://files.pythonhosted.org/packages/70/f3/ce100253c80063a7b8b406e1d1562657fd4b9b4e1b562db40e68645342fb/jiter-0.11.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:902b43386c04739229076bd1c4c69de5d115553d982ab442a8ae82947c72ede7", size = 336380, upload-time = "2025-09-15T09:20:36.867Z" }, +] + +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, +] + +[[package]] +name = "joblib" +version = "1.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/5d/447af5ea094b9e4c4054f82e223ada074c552335b9b4b2d14bd9b35a67c4/joblib-1.5.2.tar.gz", hash = "sha256:3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55", size = 331077, upload-time = "2025-08-27T12:15:46.575Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/e8/685f47e0d754320684db4425a0967f7d3fa70126bffd76110b7009a0090f/joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241", size = 308396, upload-time = "2025-08-27T12:15:45.188Z" }, +] + +[[package]] +name = "knowledge-base-module" +version = "0.1.0" +source = { editable = "modules/knowledge_base_module" } +dependencies = [ + { name = "boto3" }, + { name = "datasource" }, + { name = "flo-ai" }, + { name = "flo-cloud" }, + { name = "google-cloud-pubsub" }, + { name = "numpy" }, + { name = "ollama" }, + { name = "pandas" }, + { name = "pydantic" }, + { name = "pymupdf" }, + { name = "python-multipart" }, + { name = "pyyaml" }, + { name = "textract" }, + { name = "tiktoken" }, +] + +[package.dev-dependencies] +dev = [ + { name = "asyncpg" 
}, + { name = "auth-module" }, + { name = "common-module" }, + { name = "db-repo-module" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "testing-postgresql" }, + { name = "user-management-module" }, +] + +[package.metadata] +requires-dist = [ + { name = "boto3", specifier = "<=1.38.40" }, + { name = "datasource", editable = "plugins/datasource" }, + { name = "flo-ai", specifier = ">=1.1.0rc5" }, + { name = "flo-cloud", editable = "packages/flo_cloud" }, + { name = "google-cloud-pubsub", specifier = "~=2.30.0" }, + { name = "numpy", specifier = ">=1.24,<2.0" }, + { name = "ollama", specifier = "~=0.4.8" }, + { name = "pandas", specifier = "~=2.2.3" }, + { name = "pydantic", specifier = ">=2.11.3,<3.0.0" }, + { name = "pymupdf", specifier = ">=1.25.5,<2.0.0" }, + { name = "python-multipart" }, + { name = "pyyaml", specifier = ">=6.0.3,<7" }, + { name = "textract", specifier = "~=1.6.5" }, + { name = "tiktoken", specifier = "~=0.9.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "asyncpg", specifier = ">=0.30.0,<1.0.0" }, + { name = "auth-module", editable = "modules/auth_module" }, + { name = "common-module", editable = "modules/common_module" }, + { name = "db-repo-module", editable = "modules/db_repo_module" }, + { name = "pytest", specifier = ">=8.3.4,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.24.0,<1.0.0" }, + { name = "testing-postgresql", specifier = ">=1.3.0,<2.0.0" }, + { name = "user-management-module", editable = "modules/user_management_module" }, +] + +[[package]] +name = "llm-inference-config-module" +version = "0.1.0" +source = { editable = "modules/llm_inference_config_module" } +dependencies = [ + { name = "common-module" }, + { name = "db-repo-module" }, + { name = "dependency-injector" }, + { name = "fastapi" }, + { name = "httpx" }, + { name = "pydantic" }, +] + +[package.dev-dependencies] +dev = [ + { name = "asyncpg" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "testing-postgresql" }, +] + +[package.metadata] +requires-dist = [ + { name = "common-module", editable = "modules/common_module" }, + { name = "db-repo-module", editable = "modules/db_repo_module" }, + { name = "dependency-injector", specifier = ">=4.40.0" }, + { name = "fastapi", specifier = ">=0.100.0" }, + { name = "httpx", specifier = ">=0.25.0" }, + { name = "pydantic", specifier = ">=2.0.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "asyncpg", specifier = ">=0.30.0,<1.0.0" }, + { name = "pytest", specifier = ">=8.3.3,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.24.0,<1.0.0" }, + { name = "testing-postgresql", specifier = ">=1.3.0,<2.0.0" }, +] + +[[package]] +name = "llvmlite" +version = "0.44.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/89/6a/95a3d3610d5c75293d5dbbb2a76480d5d4eeba641557b69fe90af6c5b84e/llvmlite-0.44.0.tar.gz", hash = "sha256:07667d66a5d150abed9157ab6c0b9393c9356f229784a4385c02f99e94fc94d4", size = 171880, upload-time = "2025-01-20T11:14:41.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/e2/86b245397052386595ad726f9742e5223d7aea999b18c518a50e96c3aca4/llvmlite-0.44.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:eed7d5f29136bda63b6d7804c279e2b72e08c952b7c5df61f45db408e0ee52f3", size = 28132305, upload-time = "2025-01-20T11:12:53.936Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/ec/506902dc6870249fbe2466d9cf66d531265d0f3a1157213c8f986250c033/llvmlite-0.44.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ace564d9fa44bb91eb6e6d8e7754977783c68e90a471ea7ce913bff30bd62427", size = 26201090, upload-time = "2025-01-20T11:12:59.847Z" }, + { url = "https://files.pythonhosted.org/packages/99/fe/d030f1849ebb1f394bb3f7adad5e729b634fb100515594aca25c354ffc62/llvmlite-0.44.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5d22c3bfc842668168a786af4205ec8e3ad29fb1bc03fd11fd48460d0df64c1", size = 42361858, upload-time = "2025-01-20T11:13:07.623Z" }, + { url = "https://files.pythonhosted.org/packages/d7/7a/ce6174664b9077fc673d172e4c888cb0b128e707e306bc33fff8c2035f0d/llvmlite-0.44.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f01a394e9c9b7b1d4e63c327b096d10f6f0ed149ef53d38a09b3749dcf8c9610", size = 41184200, upload-time = "2025-01-20T11:13:20.058Z" }, + { url = "https://files.pythonhosted.org/packages/5f/c6/258801143975a6d09a373f2641237992496e15567b907a4d401839d671b8/llvmlite-0.44.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8489634d43c20cd0ad71330dde1d5bc7b9966937a263ff1ec1cebb90dc50955", size = 30331193, upload-time = "2025-01-20T11:13:26.976Z" }, + { url = "https://files.pythonhosted.org/packages/15/86/e3c3195b92e6e492458f16d233e58a1a812aa2bfbef9bdd0fbafcec85c60/llvmlite-0.44.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:1d671a56acf725bf1b531d5ef76b86660a5ab8ef19bb6a46064a705c6ca80aad", size = 28132297, upload-time = "2025-01-20T11:13:32.57Z" }, + { url = "https://files.pythonhosted.org/packages/d6/53/373b6b8be67b9221d12b24125fd0ec56b1078b660eeae266ec388a6ac9a0/llvmlite-0.44.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f79a728e0435493611c9f405168682bb75ffd1fbe6fc360733b850c80a026db", size = 26201105, upload-time = "2025-01-20T11:13:38.744Z" }, + { url = "https://files.pythonhosted.org/packages/cb/da/8341fd3056419441286c8e26bf436923021005ece0bff5f41906476ae514/llvmlite-0.44.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0143a5ef336da14deaa8ec26c5449ad5b6a2b564df82fcef4be040b9cacfea9", size = 42361901, upload-time = "2025-01-20T11:13:46.711Z" }, + { url = "https://files.pythonhosted.org/packages/53/ad/d79349dc07b8a395a99153d7ce8b01d6fcdc9f8231355a5df55ded649b61/llvmlite-0.44.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d752f89e31b66db6f8da06df8b39f9b91e78c5feea1bf9e8c1fba1d1c24c065d", size = 41184247, upload-time = "2025-01-20T11:13:56.159Z" }, + { url = "https://files.pythonhosted.org/packages/e2/3b/a9a17366af80127bd09decbe2a54d8974b6d8b274b39bf47fbaedeec6307/llvmlite-0.44.0-cp312-cp312-win_amd64.whl", hash = "sha256:eae7e2d4ca8f88f89d315b48c6b741dcb925d6a1042da694aa16ab3dd4cbd3a1", size = 30332380, upload-time = "2025-01-20T11:14:02.442Z" }, + { url = "https://files.pythonhosted.org/packages/89/24/4c0ca705a717514c2092b18476e7a12c74d34d875e05e4d742618ebbf449/llvmlite-0.44.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:319bddd44e5f71ae2689859b7203080716448a3cd1128fb144fe5c055219d516", size = 28132306, upload-time = "2025-01-20T11:14:09.035Z" }, + { url = "https://files.pythonhosted.org/packages/01/cf/1dd5a60ba6aee7122ab9243fd614abcf22f36b0437cbbe1ccf1e3391461c/llvmlite-0.44.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c58867118bad04a0bb22a2e0068c693719658105e40009ffe95c7000fcde88e", size = 26201090, upload-time = "2025-01-20T11:14:15.401Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/1b/656f5a357de7135a3777bd735cc7c9b8f23b4d37465505bd0eaf4be9befe/llvmlite-0.44.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46224058b13c96af1365290bdfebe9a6264ae62fb79b2b55693deed11657a8bf", size = 42361904, upload-time = "2025-01-20T11:14:22.949Z" }, + { url = "https://files.pythonhosted.org/packages/d8/e1/12c5f20cb9168fb3464a34310411d5ad86e4163c8ff2d14a2b57e5cc6bac/llvmlite-0.44.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aa0097052c32bf721a4efc03bd109d335dfa57d9bffb3d4c24cc680711b8b4fc", size = 41184245, upload-time = "2025-01-20T11:14:31.731Z" }, + { url = "https://files.pythonhosted.org/packages/d0/81/e66fc86539293282fd9cb7c9417438e897f369e79ffb62e1ae5e5154d4dd/llvmlite-0.44.0-cp313-cp313-win_amd64.whl", hash = "sha256:2fb7c4f2fb86cbae6dca3db9ab203eeea0e22d73b99bc2341cdf9de93612e930", size = 30331193, upload-time = "2025-01-20T11:14:38.578Z" }, +] + +[[package]] +name = "loguru" +version = "0.7.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "win32-setctime", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" }, +] + +[[package]] +name = "lxml" +version = "5.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/76/3d/14e82fc7c8fb1b7761f7e748fd47e2ec8276d137b6acfe5a4bb73853e08f/lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd", size = 3679479, upload-time = "2025-04-23T01:50:29.322Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/2d/67693cc8a605a12e5975380d7ff83020dcc759351b5a066e1cced04f797b/lxml-5.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:98a3912194c079ef37e716ed228ae0dcb960992100461b704aea4e93af6b0bb9", size = 8083240, upload-time = "2025-04-23T01:45:18.566Z" }, + { url = "https://files.pythonhosted.org/packages/73/53/b5a05ab300a808b72e848efd152fe9c022c0181b0a70b8bca1199f1bed26/lxml-5.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ea0252b51d296a75f6118ed0d8696888e7403408ad42345d7dfd0d1e93309a7", size = 4387685, upload-time = "2025-04-23T01:45:21.387Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/1a3879c5f512bdcd32995c301886fe082b2edd83c87d41b6d42d89b4ea4d/lxml-5.4.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92b69441d1bd39f4940f9eadfa417a25862242ca2c396b406f9272ef09cdcaa", size = 4991164, upload-time = "2025-04-23T01:45:23.849Z" }, + { url = "https://files.pythonhosted.org/packages/f9/94/bbc66e42559f9d04857071e3b3d0c9abd88579367fd2588a4042f641f57e/lxml-5.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20e16c08254b9b6466526bc1828d9370ee6c0d60a4b64836bc3ac2917d1e16df", size = 4746206, upload-time = "2025-04-23T01:45:26.361Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/95/34b0679bee435da2d7cae895731700e519a8dfcab499c21662ebe671603e/lxml-5.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7605c1c32c3d6e8c990dd28a0970a3cbbf1429d5b92279e37fda05fb0c92190e", size = 5342144, upload-time = "2025-04-23T01:45:28.939Z" }, + { url = "https://files.pythonhosted.org/packages/e0/5d/abfcc6ab2fa0be72b2ba938abdae1f7cad4c632f8d552683ea295d55adfb/lxml-5.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ecf4c4b83f1ab3d5a7ace10bafcb6f11df6156857a3c418244cef41ca9fa3e44", size = 4825124, upload-time = "2025-04-23T01:45:31.361Z" }, + { url = "https://files.pythonhosted.org/packages/5a/78/6bd33186c8863b36e084f294fc0a5e5eefe77af95f0663ef33809cc1c8aa/lxml-5.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cef4feae82709eed352cd7e97ae062ef6ae9c7b5dbe3663f104cd2c0e8d94ba", size = 4876520, upload-time = "2025-04-23T01:45:34.191Z" }, + { url = "https://files.pythonhosted.org/packages/3b/74/4d7ad4839bd0fc64e3d12da74fc9a193febb0fae0ba6ebd5149d4c23176a/lxml-5.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:df53330a3bff250f10472ce96a9af28628ff1f4efc51ccba351a8820bca2a8ba", size = 4765016, upload-time = "2025-04-23T01:45:36.7Z" }, + { url = "https://files.pythonhosted.org/packages/24/0d/0a98ed1f2471911dadfc541003ac6dd6879fc87b15e1143743ca20f3e973/lxml-5.4.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:aefe1a7cb852fa61150fcb21a8c8fcea7b58c4cb11fbe59c97a0a4b31cae3c8c", size = 5362884, upload-time = "2025-04-23T01:45:39.291Z" }, + { url = "https://files.pythonhosted.org/packages/48/de/d4f7e4c39740a6610f0f6959052b547478107967362e8424e1163ec37ae8/lxml-5.4.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ef5a7178fcc73b7d8c07229e89f8eb45b2908a9238eb90dcfc46571ccf0383b8", size = 4902690, upload-time = "2025-04-23T01:45:42.386Z" }, + { url = "https://files.pythonhosted.org/packages/07/8c/61763abd242af84f355ca4ef1ee096d3c1b7514819564cce70fd18c22e9a/lxml-5.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d2ed1b3cb9ff1c10e6e8b00941bb2e5bb568b307bfc6b17dffbbe8be5eecba86", size = 4944418, upload-time = "2025-04-23T01:45:46.051Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c5/6d7e3b63e7e282619193961a570c0a4c8a57fe820f07ca3fe2f6bd86608a/lxml-5.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:72ac9762a9f8ce74c9eed4a4e74306f2f18613a6b71fa065495a67ac227b3056", size = 4827092, upload-time = "2025-04-23T01:45:48.943Z" }, + { url = "https://files.pythonhosted.org/packages/71/4a/e60a306df54680b103348545706a98a7514a42c8b4fbfdcaa608567bb065/lxml-5.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f5cb182f6396706dc6cc1896dd02b1c889d644c081b0cdec38747573db88a7d7", size = 5418231, upload-time = "2025-04-23T01:45:51.481Z" }, + { url = "https://files.pythonhosted.org/packages/27/f2/9754aacd6016c930875854f08ac4b192a47fe19565f776a64004aa167521/lxml-5.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3a3178b4873df8ef9457a4875703488eb1622632a9cee6d76464b60e90adbfcd", size = 5261798, upload-time = "2025-04-23T01:45:54.146Z" }, + { url = "https://files.pythonhosted.org/packages/38/a2/0c49ec6941428b1bd4f280650d7b11a0f91ace9db7de32eb7aa23bcb39ff/lxml-5.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e094ec83694b59d263802ed03a8384594fcce477ce484b0cbcd0008a211ca751", size = 4988195, upload-time = "2025-04-23T01:45:56.685Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/75/87a3963a08eafc46a86c1131c6e28a4de103ba30b5ae903114177352a3d7/lxml-5.4.0-cp311-cp311-win32.whl", hash = "sha256:4329422de653cdb2b72afa39b0aa04252fca9071550044904b2e7036d9d97fe4", size = 3474243, upload-time = "2025-04-23T01:45:58.863Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f9/1f0964c4f6c2be861c50db380c554fb8befbea98c6404744ce243a3c87ef/lxml-5.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd3be6481ef54b8cfd0e1e953323b7aa9d9789b94842d0e5b142ef4bb7999539", size = 3815197, upload-time = "2025-04-23T01:46:01.096Z" }, + { url = "https://files.pythonhosted.org/packages/f8/4c/d101ace719ca6a4ec043eb516fcfcb1b396a9fccc4fcd9ef593df34ba0d5/lxml-5.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b5aff6f3e818e6bdbbb38e5967520f174b18f539c2b9de867b1e7fde6f8d95a4", size = 8127392, upload-time = "2025-04-23T01:46:04.09Z" }, + { url = "https://files.pythonhosted.org/packages/11/84/beddae0cec4dd9ddf46abf156f0af451c13019a0fa25d7445b655ba5ccb7/lxml-5.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942a5d73f739ad7c452bf739a62a0f83e2578afd6b8e5406308731f4ce78b16d", size = 4415103, upload-time = "2025-04-23T01:46:07.227Z" }, + { url = "https://files.pythonhosted.org/packages/d0/25/d0d93a4e763f0462cccd2b8a665bf1e4343dd788c76dcfefa289d46a38a9/lxml-5.4.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:460508a4b07364d6abf53acaa0a90b6d370fafde5693ef37602566613a9b0779", size = 5024224, upload-time = "2025-04-23T01:46:10.237Z" }, + { url = "https://files.pythonhosted.org/packages/31/ce/1df18fb8f7946e7f3388af378b1f34fcf253b94b9feedb2cec5969da8012/lxml-5.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529024ab3a505fed78fe3cc5ddc079464e709f6c892733e3f5842007cec8ac6e", size = 4769913, upload-time = "2025-04-23T01:46:12.757Z" }, + { url = "https://files.pythonhosted.org/packages/4e/62/f4a6c60ae7c40d43657f552f3045df05118636be1165b906d3423790447f/lxml-5.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ca56ebc2c474e8f3d5761debfd9283b8b18c76c4fc0967b74aeafba1f5647f9", size = 5290441, upload-time = "2025-04-23T01:46:16.037Z" }, + { url = "https://files.pythonhosted.org/packages/9e/aa/04f00009e1e3a77838c7fc948f161b5d2d5de1136b2b81c712a263829ea4/lxml-5.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a81e1196f0a5b4167a8dafe3a66aa67c4addac1b22dc47947abd5d5c7a3f24b5", size = 4820165, upload-time = "2025-04-23T01:46:19.137Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/e0b2f61fa2404bf0f1fdf1898377e5bd1b74cc9b2cf2c6ba8509b8f27990/lxml-5.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00b8686694423ddae324cf614e1b9659c2edb754de617703c3d29ff568448df5", size = 4932580, upload-time = "2025-04-23T01:46:21.963Z" }, + { url = "https://files.pythonhosted.org/packages/24/a2/8263f351b4ffe0ed3e32ea7b7830f845c795349034f912f490180d88a877/lxml-5.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:c5681160758d3f6ac5b4fea370495c48aac0989d6a0f01bb9a72ad8ef5ab75c4", size = 4759493, upload-time = "2025-04-23T01:46:24.316Z" }, + { url = "https://files.pythonhosted.org/packages/05/00/41db052f279995c0e35c79d0f0fc9f8122d5b5e9630139c592a0b58c71b4/lxml-5.4.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:2dc191e60425ad70e75a68c9fd90ab284df64d9cd410ba8d2b641c0c45bc006e", size = 5324679, upload-time = "2025-04-23T01:46:27.097Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/be/ee99e6314cdef4587617d3b3b745f9356d9b7dd12a9663c5f3b5734b64ba/lxml-5.4.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:67f779374c6b9753ae0a0195a892a1c234ce8416e4448fe1e9f34746482070a7", size = 4890691, upload-time = "2025-04-23T01:46:30.009Z" }, + { url = "https://files.pythonhosted.org/packages/ad/36/239820114bf1d71f38f12208b9c58dec033cbcf80101cde006b9bde5cffd/lxml-5.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:79d5bfa9c1b455336f52343130b2067164040604e41f6dc4d8313867ed540079", size = 4955075, upload-time = "2025-04-23T01:46:32.33Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e1/1b795cc0b174efc9e13dbd078a9ff79a58728a033142bc6d70a1ee8fc34d/lxml-5.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d3c30ba1c9b48c68489dc1829a6eede9873f52edca1dda900066542528d6b20", size = 4838680, upload-time = "2025-04-23T01:46:34.852Z" }, + { url = "https://files.pythonhosted.org/packages/72/48/3c198455ca108cec5ae3662ae8acd7fd99476812fd712bb17f1b39a0b589/lxml-5.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1af80c6316ae68aded77e91cd9d80648f7dd40406cef73df841aa3c36f6907c8", size = 5391253, upload-time = "2025-04-23T01:46:37.608Z" }, + { url = "https://files.pythonhosted.org/packages/d6/10/5bf51858971c51ec96cfc13e800a9951f3fd501686f4c18d7d84fe2d6352/lxml-5.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4d885698f5019abe0de3d352caf9466d5de2baded00a06ef3f1216c1a58ae78f", size = 5261651, upload-time = "2025-04-23T01:46:40.183Z" }, + { url = "https://files.pythonhosted.org/packages/2b/11/06710dd809205377da380546f91d2ac94bad9ff735a72b64ec029f706c85/lxml-5.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea53d51859b6c64e7c51d522c03cc2c48b9b5d6172126854cc7f01aa11f52bc", size = 5024315, upload-time = "2025-04-23T01:46:43.333Z" }, + { url = "https://files.pythonhosted.org/packages/f5/b0/15b6217834b5e3a59ebf7f53125e08e318030e8cc0d7310355e6edac98ef/lxml-5.4.0-cp312-cp312-win32.whl", hash = "sha256:d90b729fd2732df28130c064aac9bb8aff14ba20baa4aee7bd0795ff1187545f", size = 3486149, upload-time = "2025-04-23T01:46:45.684Z" }, + { url = "https://files.pythonhosted.org/packages/91/1e/05ddcb57ad2f3069101611bd5f5084157d90861a2ef460bf42f45cced944/lxml-5.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1dc4ca99e89c335a7ed47d38964abcb36c5910790f9bd106f2a8fa2ee0b909d2", size = 3817095, upload-time = "2025-04-23T01:46:48.521Z" }, + { url = "https://files.pythonhosted.org/packages/87/cb/2ba1e9dd953415f58548506fa5549a7f373ae55e80c61c9041b7fd09a38a/lxml-5.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:773e27b62920199c6197130632c18fb7ead3257fce1ffb7d286912e56ddb79e0", size = 8110086, upload-time = "2025-04-23T01:46:52.218Z" }, + { url = "https://files.pythonhosted.org/packages/b5/3e/6602a4dca3ae344e8609914d6ab22e52ce42e3e1638c10967568c5c1450d/lxml-5.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ce9c671845de9699904b1e9df95acfe8dfc183f2310f163cdaa91a3535af95de", size = 4404613, upload-time = "2025-04-23T01:46:55.281Z" }, + { url = "https://files.pythonhosted.org/packages/4c/72/bf00988477d3bb452bef9436e45aeea82bb40cdfb4684b83c967c53909c7/lxml-5.4.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9454b8d8200ec99a224df8854786262b1bd6461f4280064c807303c642c05e76", size = 5012008, upload-time = "2025-04-23T01:46:57.817Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/1f/93e42d93e9e7a44b2d3354c462cd784dbaaf350f7976b5d7c3f85d68d1b1/lxml-5.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cccd007d5c95279e529c146d095f1d39ac05139de26c098166c4beb9374b0f4d", size = 4760915, upload-time = "2025-04-23T01:47:00.745Z" }, + { url = "https://files.pythonhosted.org/packages/45/0b/363009390d0b461cf9976a499e83b68f792e4c32ecef092f3f9ef9c4ba54/lxml-5.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fce1294a0497edb034cb416ad3e77ecc89b313cff7adbee5334e4dc0d11f422", size = 5283890, upload-time = "2025-04-23T01:47:04.702Z" }, + { url = "https://files.pythonhosted.org/packages/19/dc/6056c332f9378ab476c88e301e6549a0454dbee8f0ae16847414f0eccb74/lxml-5.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:24974f774f3a78ac12b95e3a20ef0931795ff04dbb16db81a90c37f589819551", size = 4812644, upload-time = "2025-04-23T01:47:07.833Z" }, + { url = "https://files.pythonhosted.org/packages/ee/8a/f8c66bbb23ecb9048a46a5ef9b495fd23f7543df642dabeebcb2eeb66592/lxml-5.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:497cab4d8254c2a90bf988f162ace2ddbfdd806fce3bda3f581b9d24c852e03c", size = 4921817, upload-time = "2025-04-23T01:47:10.317Z" }, + { url = "https://files.pythonhosted.org/packages/04/57/2e537083c3f381f83d05d9b176f0d838a9e8961f7ed8ddce3f0217179ce3/lxml-5.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e794f698ae4c5084414efea0f5cc9f4ac562ec02d66e1484ff822ef97c2cadff", size = 4753916, upload-time = "2025-04-23T01:47:12.823Z" }, + { url = "https://files.pythonhosted.org/packages/d8/80/ea8c4072109a350848f1157ce83ccd9439601274035cd045ac31f47f3417/lxml-5.4.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:2c62891b1ea3094bb12097822b3d44b93fc6c325f2043c4d2736a8ff09e65f60", size = 5289274, upload-time = "2025-04-23T01:47:15.916Z" }, + { url = "https://files.pythonhosted.org/packages/b3/47/c4be287c48cdc304483457878a3f22999098b9a95f455e3c4bda7ec7fc72/lxml-5.4.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:142accb3e4d1edae4b392bd165a9abdee8a3c432a2cca193df995bc3886249c8", size = 4874757, upload-time = "2025-04-23T01:47:19.793Z" }, + { url = "https://files.pythonhosted.org/packages/2f/04/6ef935dc74e729932e39478e44d8cfe6a83550552eaa072b7c05f6f22488/lxml-5.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1a42b3a19346e5601d1b8296ff6ef3d76038058f311902edd574461e9c036982", size = 4947028, upload-time = "2025-04-23T01:47:22.401Z" }, + { url = "https://files.pythonhosted.org/packages/cb/f9/c33fc8daa373ef8a7daddb53175289024512b6619bc9de36d77dca3df44b/lxml-5.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4291d3c409a17febf817259cb37bc62cb7eb398bcc95c1356947e2871911ae61", size = 4834487, upload-time = "2025-04-23T01:47:25.513Z" }, + { url = "https://files.pythonhosted.org/packages/8d/30/fc92bb595bcb878311e01b418b57d13900f84c2b94f6eca9e5073ea756e6/lxml-5.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4f5322cf38fe0e21c2d73901abf68e6329dc02a4994e483adbcf92b568a09a54", size = 5381688, upload-time = "2025-04-23T01:47:28.454Z" }, + { url = "https://files.pythonhosted.org/packages/43/d1/3ba7bd978ce28bba8e3da2c2e9d5ae3f8f521ad3f0ca6ea4788d086ba00d/lxml-5.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0be91891bdb06ebe65122aa6bf3fc94489960cf7e03033c6f83a90863b23c58b", size = 5242043, upload-time = "2025-04-23T01:47:31.208Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/cd/95fa2201041a610c4d08ddaf31d43b98ecc4b1d74b1e7245b1abdab443cb/lxml-5.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:15a665ad90054a3d4f397bc40f73948d48e36e4c09f9bcffc7d90c87410e478a", size = 5021569, upload-time = "2025-04-23T01:47:33.805Z" }, + { url = "https://files.pythonhosted.org/packages/2d/a6/31da006fead660b9512d08d23d31e93ad3477dd47cc42e3285f143443176/lxml-5.4.0-cp313-cp313-win32.whl", hash = "sha256:d5663bc1b471c79f5c833cffbc9b87d7bf13f87e055a5c86c363ccd2348d7e82", size = 3485270, upload-time = "2025-04-23T01:47:36.133Z" }, + { url = "https://files.pythonhosted.org/packages/fc/14/c115516c62a7d2499781d2d3d7215218c0731b2c940753bf9f9b7b73924d/lxml-5.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:bcb7a1096b4b6b24ce1ac24d4942ad98f983cd3810f9711bcd0293f43a9d8b9f", size = 3814606, upload-time = "2025-04-23T01:47:39.028Z" }, +] + +[[package]] +name = "mako" +version = "1.3.10" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, +] + +[[package]] +name = "markdown" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/7dd27d9d863b3376fcf23a5a13cb5d024aed1db46f963f1b5735ae43b3be/markdown-3.10.tar.gz", hash = "sha256:37062d4f2aa4b2b6b32aefb80faa300f82cc790cb949a35b8caede34f2b68c0e", size = 364931, upload-time = "2025-11-03T19:51:15.007Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/70/81/54e3ce63502cd085a0c556652a4e1b919c45a446bd1e5300e10c44c8c521/markdown-3.10-py3-none-any.whl", hash = "sha256:b5b99d6951e2e4948d939255596523444c0e677c669700b1d17aa4a8a464cb7c", size = 107678, upload-time = "2025-11-03T19:51:13.887Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + +[[package]] 
+name = "marshmallow" +version = "3.26.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/5e/5e53d26b42ab75491cda89b871dab9e97c840bf12c63ec58a1919710cd06/marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6", size = 221825, upload-time = "2025-02-03T15:32:25.093Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/75/51952c7b2d3873b44a0028b1bd26a25078c18f92f256608e8d1dc61b39fd/marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c", size = 50878, upload-time = "2025-02-03T15:32:22.295Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "microsoft-kiota-abstractions" +version = "1.9.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-sdk" }, + { name = "std-uritemplate" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/6c/fd855a03545ae261b28d179b206e5f80a0e7c95fac5a580514c4dabedca0/microsoft_kiota_abstractions-1.9.7.tar.gz", hash = "sha256:731ed60c2df74ca80d1bf36d40a4c390aab353db3a76796c63ea9e9a220ce65c", size = 24447, upload-time = "2025-09-09T13:53:42.631Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/d8/d699a2cb209c72f1258af5f582a7868d1b006e57cc4394b68b0f996ba370/microsoft_kiota_abstractions-1.9.7-py3-none-any.whl", hash = "sha256:8add66c38d05ab9a496c1c843bb16e04b70edc4651dc290b9629b14009f5c0c0", size = 44404, upload-time = "2025-09-09T13:53:41.312Z" }, +] + +[[package]] +name = "microsoft-kiota-authentication-azure" +version = "1.9.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "azure-core" }, + { name = "microsoft-kiota-abstractions" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-sdk" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/03/9a/3deb5d951e55e059fbde93deaf1b3fdd1ec3a6e8bdac01280c640dac7b8c/microsoft_kiota_authentication_azure-1.9.7.tar.gz", hash = "sha256:1ecef94097ca8029e5b903bfef8dbbf47ba75bc1521907164a84b6617226696b", size = 4987, upload-time = "2025-09-09T13:53:52.47Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/24/49/d12e7eabd6fc7039bfa301dfff26f0fced9bd164564b96b6d99fffcb020b/microsoft_kiota_authentication_azure-1.9.7-py3-none-any.whl", hash = "sha256:a2d776bef22d10be65df1ea9e8f1737e46981bd14cdb70e3fe4f4a066e92b139", size = 6908, upload-time = "2025-09-09T13:53:51.735Z" }, +] + +[[package]] +name = "microsoft-kiota-http" +version = "1.9.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx", extra = ["http2"] }, + { name = "microsoft-kiota-abstractions" }, + { name = "opentelemetry-api" }, + { name = 
"opentelemetry-sdk" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/a9/7efe67311902394a208545ae067dfc7e957383939b0ee6ff43e1955afbe7/microsoft_kiota_http-1.9.7.tar.gz", hash = "sha256:abcacca784649308ab93d8578c2afb581a42deed048b183d7bbdc48c325dd6a1", size = 21249, upload-time = "2025-09-09T13:54:00.45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/99/1d625b9353cabb3aaddb468c379b1e1fc726795281e94437096846b434b1/microsoft_kiota_http-1.9.7-py3-none-any.whl", hash = "sha256:14ce6b14c4fa93608f535f2c6ae21d35b1d0e2635ab70501fa3a3afc90135261", size = 31577, upload-time = "2025-09-09T13:53:59.616Z" }, +] + +[[package]] +name = "microsoft-kiota-serialization-form" +version = "1.9.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "microsoft-kiota-abstractions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0a/d7/dc6d782f75608be4b1733df6592e4c7e819b6e32b290ac45304f74c0c0cf/microsoft_kiota_serialization_form-1.9.7.tar.gz", hash = "sha256:d3297a60778c0437513334b703225ce108fd109f13c1993afea599b85dc5a528", size = 8999, upload-time = "2025-09-09T13:54:08.924Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/62/9929fc1fe0ff76af5ff6dd8179b71c58105675465536141cd491e03f5a1d/microsoft_kiota_serialization_form-1.9.7-py3-none-any.whl", hash = "sha256:72d2dc5e57a993145702870ad89c85cebe3336d4d34f231d951ee1bc83ad11b9", size = 10671, upload-time = "2025-09-09T13:54:08.169Z" }, +] + +[[package]] +name = "microsoft-kiota-serialization-json" +version = "1.9.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "microsoft-kiota-abstractions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/f8/e13c48e610a00f2abfa9fa19f03e2cf21fe98486dfc5a453ce6c0490d3f2/microsoft_kiota_serialization_json-1.9.7.tar.gz", hash = "sha256:1e54ff90b185fe21cca94ebbf8468bf44a2ca5f082c4cf04dbd2d42a9472837a", size = 9416, upload-time = "2025-09-09T13:54:18.299Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/6b/761e45c91086fb45e69ee1b85d538f6e5fc89b86f6ade148e8c5575259ce/microsoft_kiota_serialization_json-1.9.7-py3-none-any.whl", hash = "sha256:6f44012f00cf7c4c4d8b9195e7f8a691d186021b5d9a20e791a77c800b5be531", size = 11056, upload-time = "2025-09-09T13:54:17.395Z" }, +] + +[[package]] +name = "microsoft-kiota-serialization-multipart" +version = "1.9.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "microsoft-kiota-abstractions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/23/31b11fd0e44bb79923ea8310c2f3d0bc1e16f56d35d2fc73203a260a0a73/microsoft_kiota_serialization_multipart-1.9.7.tar.gz", hash = "sha256:1a13d193d078dea86711d8c6e89ac142aff5033079c7be4061279b2da5c83ef8", size = 5150, upload-time = "2025-09-09T13:54:35.551Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/ca/98efd66c8e7180928fe2901f4c766799991836e536a8a0aca9186b5d7c7a/microsoft_kiota_serialization_multipart-1.9.7-py3-none-any.whl", hash = "sha256:cd72ee004039ee64a35bd5254afd3f8bc89877e948282ab0fe0a7efab75f68bb", size = 6651, upload-time = "2025-09-09T13:54:34.811Z" }, +] + +[[package]] +name = "microsoft-kiota-serialization-text" +version = "1.9.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "microsoft-kiota-abstractions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/24/5c/479378981c7b8fb22d6ba693f07db457a18d3efc86dd083ebe31d6192d37/microsoft_kiota_serialization_text-1.9.7.tar.gz", 
hash = "sha256:d57a082d5c6ea1e650286314cac9a9e7a2662aa4beb80635bf4addd33d252bd5", size = 7306, upload-time = "2025-09-09T13:54:26.45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/8b/b8b6482719d9ecc4d87f07aa8726d33c18004e0630ef5cd2891ee8bf2ada/microsoft_kiota_serialization_text-1.9.7-py3-none-any.whl", hash = "sha256:47c4d774883bec269a6eb077a5ca2f26ae6715986c8defa374d536a9664dc43e", size = 8840, upload-time = "2025-09-09T13:54:25.642Z" }, +] + +[[package]] +name = "mmh3" +version = "5.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/af/f28c2c2f51f31abb4725f9a64bc7863d5f491f6539bd26aee2a1d21a649e/mmh3-5.2.0.tar.gz", hash = "sha256:1efc8fec8478e9243a78bb993422cf79f8ff85cb4cf6b79647480a31e0d950a8", size = 33582, upload-time = "2025-07-29T07:43:48.49Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/87/399567b3796e134352e11a8b973cd470c06b2ecfad5468fe580833be442b/mmh3-5.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7901c893e704ee3c65f92d39b951f8f34ccf8e8566768c58103fb10e55afb8c1", size = 56107, upload-time = "2025-07-29T07:41:57.07Z" }, + { url = "https://files.pythonhosted.org/packages/c3/09/830af30adf8678955b247d97d3d9543dd2fd95684f3cd41c0cd9d291da9f/mmh3-5.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5f5536b1cbfa72318ab3bfc8a8188b949260baed186b75f0abc75b95d8c051", size = 40635, upload-time = "2025-07-29T07:41:57.903Z" }, + { url = "https://files.pythonhosted.org/packages/07/14/eaba79eef55b40d653321765ac5e8f6c9ac38780b8a7c2a2f8df8ee0fb72/mmh3-5.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cedac4f4054b8f7859e5aed41aaa31ad03fce6851901a7fdc2af0275ac533c10", size = 40078, upload-time = "2025-07-29T07:41:58.772Z" }, + { url = "https://files.pythonhosted.org/packages/bb/26/83a0f852e763f81b2265d446b13ed6d49ee49e1fc0c47b9655977e6f3d81/mmh3-5.2.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:eb756caf8975882630ce4e9fbbeb9d3401242a72528230422c9ab3a0d278e60c", size = 97262, upload-time = "2025-07-29T07:41:59.678Z" }, + { url = "https://files.pythonhosted.org/packages/00/7d/b7133b10d12239aeaebf6878d7eaf0bf7d3738c44b4aba3c564588f6d802/mmh3-5.2.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:097e13c8b8a66c5753c6968b7640faefe85d8e38992703c1f666eda6ef4c3762", size = 103118, upload-time = "2025-07-29T07:42:01.197Z" }, + { url = "https://files.pythonhosted.org/packages/7b/3e/62f0b5dce2e22fd5b7d092aba285abd7959ea2b17148641e029f2eab1ffa/mmh3-5.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7c0c7845566b9686480e6a7e9044db4afb60038d5fabd19227443f0104eeee4", size = 106072, upload-time = "2025-07-29T07:42:02.601Z" }, + { url = "https://files.pythonhosted.org/packages/66/84/ea88bb816edfe65052c757a1c3408d65c4201ddbd769d4a287b0f1a628b2/mmh3-5.2.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:61ac226af521a572700f863d6ecddc6ece97220ce7174e311948ff8c8919a363", size = 112925, upload-time = "2025-07-29T07:42:03.632Z" }, + { url = "https://files.pythonhosted.org/packages/2e/13/c9b1c022807db575fe4db806f442d5b5784547e2e82cff36133e58ea31c7/mmh3-5.2.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:582f9dbeefe15c32a5fa528b79b088b599a1dfe290a4436351c6090f90ddebb8", size = 120583, upload-time = "2025-07-29T07:42:04.991Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/5f/0e2dfe1a38f6a78788b7eb2b23432cee24623aeabbc907fed07fc17d6935/mmh3-5.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2ebfc46b39168ab1cd44670a32ea5489bcbc74a25795c61b6d888c5c2cf654ed", size = 99127, upload-time = "2025-07-29T07:42:05.929Z" }, + { url = "https://files.pythonhosted.org/packages/77/27/aefb7d663b67e6a0c4d61a513c83e39ba2237e8e4557fa7122a742a23de5/mmh3-5.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1556e31e4bd0ac0c17eaf220be17a09c171d7396919c3794274cb3415a9d3646", size = 98544, upload-time = "2025-07-29T07:42:06.87Z" }, + { url = "https://files.pythonhosted.org/packages/ab/97/a21cc9b1a7c6e92205a1b5fa030cdf62277d177570c06a239eca7bd6dd32/mmh3-5.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:81df0dae22cd0da87f1c978602750f33d17fb3d21fb0f326c89dc89834fea79b", size = 106262, upload-time = "2025-07-29T07:42:07.804Z" }, + { url = "https://files.pythonhosted.org/packages/43/18/db19ae82ea63c8922a880e1498a75342311f8aa0c581c4dd07711473b5f7/mmh3-5.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:eba01ec3bd4a49b9ac5ca2bc6a73ff5f3af53374b8556fcc2966dd2af9eb7779", size = 109824, upload-time = "2025-07-29T07:42:08.735Z" }, + { url = "https://files.pythonhosted.org/packages/9f/f5/41dcf0d1969125fc6f61d8618b107c79130b5af50b18a4651210ea52ab40/mmh3-5.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e9a011469b47b752e7d20de296bb34591cdfcbe76c99c2e863ceaa2aa61113d2", size = 97255, upload-time = "2025-07-29T07:42:09.706Z" }, + { url = "https://files.pythonhosted.org/packages/32/b3/cce9eaa0efac1f0e735bb178ef9d1d2887b4927fe0ec16609d5acd492dda/mmh3-5.2.0-cp311-cp311-win32.whl", hash = "sha256:bc44fc2b886243d7c0d8daeb37864e16f232e5b56aaec27cc781d848264cfd28", size = 40779, upload-time = "2025-07-29T07:42:10.546Z" }, + { url = "https://files.pythonhosted.org/packages/7c/e9/3fa0290122e6d5a7041b50ae500b8a9f4932478a51e48f209a3879fe0b9b/mmh3-5.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:8ebf241072cf2777a492d0e09252f8cc2b3edd07dfdb9404b9757bffeb4f2cee", size = 41549, upload-time = "2025-07-29T07:42:11.399Z" }, + { url = "https://files.pythonhosted.org/packages/3a/54/c277475b4102588e6f06b2e9095ee758dfe31a149312cdbf62d39a9f5c30/mmh3-5.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:b5f317a727bba0e633a12e71228bc6a4acb4f471a98b1c003163b917311ea9a9", size = 39336, upload-time = "2025-07-29T07:42:12.209Z" }, + { url = "https://files.pythonhosted.org/packages/bf/6a/d5aa7edb5c08e0bd24286c7d08341a0446f9a2fbbb97d96a8a6dd81935ee/mmh3-5.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:384eda9361a7bf83a85e09447e1feafe081034af9dd428893701b959230d84be", size = 56141, upload-time = "2025-07-29T07:42:13.456Z" }, + { url = "https://files.pythonhosted.org/packages/08/49/131d0fae6447bc4a7299ebdb1a6fb9d08c9f8dcf97d75ea93e8152ddf7ab/mmh3-5.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2c9da0d568569cc87315cb063486d761e38458b8ad513fedd3dc9263e1b81bcd", size = 40681, upload-time = "2025-07-29T07:42:14.306Z" }, + { url = "https://files.pythonhosted.org/packages/8f/6f/9221445a6bcc962b7f5ff3ba18ad55bba624bacdc7aa3fc0a518db7da8ec/mmh3-5.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86d1be5d63232e6eb93c50881aea55ff06eb86d8e08f9b5417c8c9b10db9db96", size = 40062, upload-time = "2025-07-29T07:42:15.08Z" }, + { url = "https://files.pythonhosted.org/packages/1e/d4/6bb2d0fef81401e0bb4c297d1eb568b767de4ce6fc00890bc14d7b51ecc4/mmh3-5.2.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:bf7bee43e17e81671c447e9c83499f53d99bf440bc6d9dc26a841e21acfbe094", size = 97333, upload-time = "2025-07-29T07:42:16.436Z" }, + { url = "https://files.pythonhosted.org/packages/44/e0/ccf0daff8134efbb4fbc10a945ab53302e358c4b016ada9bf97a6bdd50c1/mmh3-5.2.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7aa18cdb58983ee660c9c400b46272e14fa253c675ed963d3812487f8ca42037", size = 103310, upload-time = "2025-07-29T07:42:17.796Z" }, + { url = "https://files.pythonhosted.org/packages/02/63/1965cb08a46533faca0e420e06aff8bbaf9690a6f0ac6ae6e5b2e4544687/mmh3-5.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9d032488fcec32d22be6542d1a836f00247f40f320844dbb361393b5b22773", size = 106178, upload-time = "2025-07-29T07:42:19.281Z" }, + { url = "https://files.pythonhosted.org/packages/c2/41/c883ad8e2c234013f27f92061200afc11554ea55edd1bcf5e1accd803a85/mmh3-5.2.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1861fb6b1d0453ed7293200139c0a9011eeb1376632e048e3766945b13313c5", size = 113035, upload-time = "2025-07-29T07:42:20.356Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/1ccade8b1fa625d634a18bab7bf08a87457e09d5ec8cf83ca07cbea9d400/mmh3-5.2.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:99bb6a4d809aa4e528ddfe2c85dd5239b78b9dd14be62cca0329db78505e7b50", size = 120784, upload-time = "2025-07-29T07:42:21.377Z" }, + { url = "https://files.pythonhosted.org/packages/77/1c/919d9171fcbdcdab242e06394464ccf546f7d0f3b31e0d1e3a630398782e/mmh3-5.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1f8d8b627799f4e2fcc7c034fed8f5f24dc7724ff52f69838a3d6d15f1ad4765", size = 99137, upload-time = "2025-07-29T07:42:22.344Z" }, + { url = "https://files.pythonhosted.org/packages/66/8a/1eebef5bd6633d36281d9fc83cf2e9ba1ba0e1a77dff92aacab83001cee4/mmh3-5.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b5995088dd7023d2d9f310a0c67de5a2b2e06a570ecfd00f9ff4ab94a67cde43", size = 98664, upload-time = "2025-07-29T07:42:23.269Z" }, + { url = "https://files.pythonhosted.org/packages/13/41/a5d981563e2ee682b21fb65e29cc0f517a6734a02b581359edd67f9d0360/mmh3-5.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1a5f4d2e59d6bba8ef01b013c472741835ad961e7c28f50c82b27c57748744a4", size = 106459, upload-time = "2025-07-29T07:42:24.238Z" }, + { url = "https://files.pythonhosted.org/packages/24/31/342494cd6ab792d81e083680875a2c50fa0c5df475ebf0b67784f13e4647/mmh3-5.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fd6e6c3d90660d085f7e73710eab6f5545d4854b81b0135a3526e797009dbda3", size = 110038, upload-time = "2025-07-29T07:42:25.629Z" }, + { url = "https://files.pythonhosted.org/packages/28/44/efda282170a46bb4f19c3e2b90536513b1d821c414c28469a227ca5a1789/mmh3-5.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c4a2f3d83879e3de2eb8cbf562e71563a8ed15ee9b9c2e77ca5d9f73072ac15c", size = 97545, upload-time = "2025-07-29T07:42:27.04Z" }, + { url = "https://files.pythonhosted.org/packages/68/8f/534ae319c6e05d714f437e7206f78c17e66daca88164dff70286b0e8ea0c/mmh3-5.2.0-cp312-cp312-win32.whl", hash = "sha256:2421b9d665a0b1ad724ec7332fb5a98d075f50bc51a6ff854f3a1882bd650d49", size = 40805, upload-time = "2025-07-29T07:42:28.032Z" }, + { url = "https://files.pythonhosted.org/packages/b8/f6/f6abdcfefcedab3c964868048cfe472764ed358c2bf6819a70dd4ed4ed3a/mmh3-5.2.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:72d80005b7634a3a2220f81fbeb94775ebd12794623bb2e1451701ea732b4aa3", size = 41597, upload-time = "2025-07-29T07:42:28.894Z" }, + { url = "https://files.pythonhosted.org/packages/15/fd/f7420e8cbce45c259c770cac5718badf907b302d3a99ec587ba5ce030237/mmh3-5.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:3d6bfd9662a20c054bc216f861fa330c2dac7c81e7fb8307b5e32ab5b9b4d2e0", size = 39350, upload-time = "2025-07-29T07:42:29.794Z" }, + { url = "https://files.pythonhosted.org/packages/d8/fa/27f6ab93995ef6ad9f940e96593c5dd24744d61a7389532b0fec03745607/mmh3-5.2.0-cp313-cp313-android_21_arm64_v8a.whl", hash = "sha256:e79c00eba78f7258e5b354eccd4d7907d60317ced924ea4a5f2e9d83f5453065", size = 40874, upload-time = "2025-07-29T07:42:30.662Z" }, + { url = "https://files.pythonhosted.org/packages/11/9c/03d13bcb6a03438bc8cac3d2e50f80908d159b31a4367c2e1a7a077ded32/mmh3-5.2.0-cp313-cp313-android_21_x86_64.whl", hash = "sha256:956127e663d05edbeec54df38885d943dfa27406594c411139690485128525de", size = 42012, upload-time = "2025-07-29T07:42:31.539Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/0865d9765408a7d504f1789944e678f74e0888b96a766d578cb80b040999/mmh3-5.2.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:c3dca4cb5b946ee91b3d6bb700d137b1cd85c20827f89fdf9c16258253489044", size = 39197, upload-time = "2025-07-29T07:42:32.374Z" }, + { url = "https://files.pythonhosted.org/packages/3e/12/76c3207bd186f98b908b6706c2317abb73756d23a4e68ea2bc94825b9015/mmh3-5.2.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:e651e17bfde5840e9e4174b01e9e080ce49277b70d424308b36a7969d0d1af73", size = 39840, upload-time = "2025-07-29T07:42:33.227Z" }, + { url = "https://files.pythonhosted.org/packages/5d/0d/574b6cce5555c9f2b31ea189ad44986755eb14e8862db28c8b834b8b64dc/mmh3-5.2.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:9f64bf06f4bf623325fda3a6d02d36cd69199b9ace99b04bb2d7fd9f89688504", size = 40644, upload-time = "2025-07-29T07:42:34.099Z" }, + { url = "https://files.pythonhosted.org/packages/52/82/3731f8640b79c46707f53ed72034a58baad400be908c87b0088f1f89f986/mmh3-5.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ddc63328889bcaee77b743309e5c7d2d52cee0d7d577837c91b6e7cc9e755e0b", size = 56153, upload-time = "2025-07-29T07:42:35.031Z" }, + { url = "https://files.pythonhosted.org/packages/4f/34/e02dca1d4727fd9fdeaff9e2ad6983e1552804ce1d92cc796e5b052159bb/mmh3-5.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bb0fdc451fb6d86d81ab8f23d881b8d6e37fc373a2deae1c02d27002d2ad7a05", size = 40684, upload-time = "2025-07-29T07:42:35.914Z" }, + { url = "https://files.pythonhosted.org/packages/8f/36/3dee40767356e104967e6ed6d102ba47b0b1ce2a89432239b95a94de1b89/mmh3-5.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b29044e1ffdb84fe164d0a7ea05c7316afea93c00f8ed9449cf357c36fc4f814", size = 40057, upload-time = "2025-07-29T07:42:36.755Z" }, + { url = "https://files.pythonhosted.org/packages/31/58/228c402fccf76eb39a0a01b8fc470fecf21965584e66453b477050ee0e99/mmh3-5.2.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:58981d6ea9646dbbf9e59a30890cbf9f610df0e4a57dbfe09215116fd90b0093", size = 97344, upload-time = "2025-07-29T07:42:37.675Z" }, + { url = "https://files.pythonhosted.org/packages/34/82/fc5ce89006389a6426ef28e326fc065b0fbaaed230373b62d14c889f47ea/mmh3-5.2.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7e5634565367b6d98dc4aa2983703526ef556b3688ba3065edb4b9b90ede1c54", size = 
103325, upload-time = "2025-07-29T07:42:38.591Z" }, + { url = "https://files.pythonhosted.org/packages/09/8c/261e85777c6aee1ebd53f2f17e210e7481d5b0846cd0b4a5c45f1e3761b8/mmh3-5.2.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0271ac12415afd3171ab9a3c7cbfc71dee2c68760a7dc9d05bf8ed6ddfa3a7a", size = 106240, upload-time = "2025-07-29T07:42:39.563Z" }, + { url = "https://files.pythonhosted.org/packages/70/73/2f76b3ad8a3d431824e9934403df36c0ddacc7831acf82114bce3c4309c8/mmh3-5.2.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:45b590e31bc552c6f8e2150ff1ad0c28dd151e9f87589e7eaf508fbdd8e8e908", size = 113060, upload-time = "2025-07-29T07:42:40.585Z" }, + { url = "https://files.pythonhosted.org/packages/9f/b9/7ea61a34e90e50a79a9d87aa1c0b8139a7eaf4125782b34b7d7383472633/mmh3-5.2.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bdde97310d59604f2a9119322f61b31546748499a21b44f6715e8ced9308a6c5", size = 120781, upload-time = "2025-07-29T07:42:41.618Z" }, + { url = "https://files.pythonhosted.org/packages/0f/5b/ae1a717db98c7894a37aeedbd94b3f99e6472a836488f36b6849d003485b/mmh3-5.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fc9c5f280438cf1c1a8f9abb87dc8ce9630a964120cfb5dd50d1e7ce79690c7a", size = 99174, upload-time = "2025-07-29T07:42:42.587Z" }, + { url = "https://files.pythonhosted.org/packages/e3/de/000cce1d799fceebb6d4487ae29175dd8e81b48e314cba7b4da90bcf55d7/mmh3-5.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c903e71fd8debb35ad2a4184c1316b3cb22f64ce517b4e6747f25b0a34e41266", size = 98734, upload-time = "2025-07-29T07:42:43.996Z" }, + { url = "https://files.pythonhosted.org/packages/79/19/0dc364391a792b72fbb22becfdeacc5add85cc043cd16986e82152141883/mmh3-5.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:eed4bba7ff8a0d37106ba931ab03bdd3915fbb025bcf4e1f0aa02bc8114960c5", size = 106493, upload-time = "2025-07-29T07:42:45.07Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b1/bc8c28e4d6e807bbb051fefe78e1156d7f104b89948742ad310612ce240d/mmh3-5.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1fdb36b940e9261aff0b5177c5b74a36936b902f473180f6c15bde26143681a9", size = 110089, upload-time = "2025-07-29T07:42:46.122Z" }, + { url = "https://files.pythonhosted.org/packages/3b/a2/d20f3f5c95e9c511806686c70d0a15479cc3941c5f322061697af1c1ff70/mmh3-5.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7303aab41e97adcf010a09efd8f1403e719e59b7705d5e3cfed3dd7571589290", size = 97571, upload-time = "2025-07-29T07:42:47.18Z" }, + { url = "https://files.pythonhosted.org/packages/7b/23/665296fce4f33488deec39a750ffd245cfc07aafb0e3ef37835f91775d14/mmh3-5.2.0-cp313-cp313-win32.whl", hash = "sha256:03e08c6ebaf666ec1e3d6ea657a2d363bb01effd1a9acfe41f9197decaef0051", size = 40806, upload-time = "2025-07-29T07:42:48.166Z" }, + { url = "https://files.pythonhosted.org/packages/59/b0/92e7103f3b20646e255b699e2d0327ce53a3f250e44367a99dc8be0b7c7a/mmh3-5.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:7fddccd4113e7b736706e17a239a696332360cbaddf25ae75b57ba1acce65081", size = 41600, upload-time = "2025-07-29T07:42:49.371Z" }, + { url = "https://files.pythonhosted.org/packages/99/22/0b2bd679a84574647de538c5b07ccaa435dbccc37815067fe15b90fe8dad/mmh3-5.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:fa0c966ee727aad5406d516375593c5f058c766b21236ab8985693934bb5085b", size = 39349, upload-time = "2025-07-29T07:42:50.268Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/ca/a20db059a8a47048aaf550da14a145b56e9c7386fb8280d3ce2962dcebf7/mmh3-5.2.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:e5015f0bb6eb50008bed2d4b1ce0f2a294698a926111e4bb202c0987b4f89078", size = 39209, upload-time = "2025-07-29T07:42:51.559Z" }, + { url = "https://files.pythonhosted.org/packages/98/dd/e5094799d55c7482d814b979a0fd608027d0af1b274bfb4c3ea3e950bfd5/mmh3-5.2.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:e0f3ed828d709f5b82d8bfe14f8856120718ec4bd44a5b26102c3030a1e12501", size = 39843, upload-time = "2025-07-29T07:42:52.536Z" }, + { url = "https://files.pythonhosted.org/packages/f4/6b/7844d7f832c85400e7cc89a1348e4e1fdd38c5a38415bb5726bbb8fcdb6c/mmh3-5.2.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:f35727c5118aba95f0397e18a1a5b8405425581bfe53e821f0fb444cbdc2bc9b", size = 40648, upload-time = "2025-07-29T07:42:53.392Z" }, + { url = "https://files.pythonhosted.org/packages/1f/bf/71f791f48a21ff3190ba5225807cbe4f7223360e96862c376e6e3fb7efa7/mmh3-5.2.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bc244802ccab5220008cb712ca1508cb6a12f0eb64ad62997156410579a1770", size = 56164, upload-time = "2025-07-29T07:42:54.267Z" }, + { url = "https://files.pythonhosted.org/packages/70/1f/f87e3d34d83032b4f3f0f528c6d95a98290fcacf019da61343a49dccfd51/mmh3-5.2.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ff3d50dc3fe8a98059f99b445dfb62792b5d006c5e0b8f03c6de2813b8376110", size = 40692, upload-time = "2025-07-29T07:42:55.234Z" }, + { url = "https://files.pythonhosted.org/packages/a6/e2/db849eaed07117086f3452feca8c839d30d38b830ac59fe1ce65af8be5ad/mmh3-5.2.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:37a358cc881fe796e099c1db6ce07ff757f088827b4e8467ac52b7a7ffdca647", size = 40068, upload-time = "2025-07-29T07:42:56.158Z" }, + { url = "https://files.pythonhosted.org/packages/df/6b/209af927207af77425b044e32f77f49105a0b05d82ff88af6971d8da4e19/mmh3-5.2.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b9a87025121d1c448f24f27ff53a5fe7b6ef980574b4a4f11acaabe702420d63", size = 97367, upload-time = "2025-07-29T07:42:57.037Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e0/78adf4104c425606a9ce33fb351f790c76a6c2314969c4a517d1ffc92196/mmh3-5.2.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ba55d6ca32eeef8b2625e1e4bfc3b3db52bc63014bd7e5df8cc11bf2b036b12", size = 103306, upload-time = "2025-07-29T07:42:58.522Z" }, + { url = "https://files.pythonhosted.org/packages/a3/79/c2b89f91b962658b890104745b1b6c9ce38d50a889f000b469b91eeb1b9e/mmh3-5.2.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9ff37ba9f15637e424c2ab57a1a590c52897c845b768e4e0a4958084ec87f22", size = 106312, upload-time = "2025-07-29T07:42:59.552Z" }, + { url = "https://files.pythonhosted.org/packages/4b/14/659d4095528b1a209be90934778c5ffe312177d51e365ddcbca2cac2ec7c/mmh3-5.2.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a094319ec0db52a04af9fdc391b4d39a1bc72bc8424b47c4411afb05413a44b5", size = 113135, upload-time = "2025-07-29T07:43:00.745Z" }, + { url = "https://files.pythonhosted.org/packages/8d/6f/cd7734a779389a8a467b5c89a48ff476d6f2576e78216a37551a97e9e42a/mmh3-5.2.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c5584061fd3da584659b13587f26c6cad25a096246a481636d64375d0c1f6c07", 
size = 120775, upload-time = "2025-07-29T07:43:02.124Z" }, + { url = "https://files.pythonhosted.org/packages/1d/ca/8256e3b96944408940de3f9291d7e38a283b5761fe9614d4808fcf27bd62/mmh3-5.2.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ecbfc0437ddfdced5e7822d1ce4855c9c64f46819d0fdc4482c53f56c707b935", size = 99178, upload-time = "2025-07-29T07:43:03.182Z" }, + { url = "https://files.pythonhosted.org/packages/8a/32/39e2b3cf06b6e2eb042c984dab8680841ac2a0d3ca6e0bea30db1f27b565/mmh3-5.2.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:7b986d506a8e8ea345791897ba5d8ba0d9d8820cd4fc3e52dbe6de19388de2e7", size = 98738, upload-time = "2025-07-29T07:43:04.207Z" }, + { url = "https://files.pythonhosted.org/packages/61/d3/7bbc8e0e8cf65ebbe1b893ffa0467b7ecd1bd07c3bbf6c9db4308ada22ec/mmh3-5.2.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:38d899a156549da8ef6a9f1d6f7ef231228d29f8f69bce2ee12f5fba6d6fd7c5", size = 106510, upload-time = "2025-07-29T07:43:05.656Z" }, + { url = "https://files.pythonhosted.org/packages/10/99/b97e53724b52374e2f3859046f0eb2425192da356cb19784d64bc17bb1cf/mmh3-5.2.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:d86651fa45799530885ba4dab3d21144486ed15285e8784181a0ab37a4552384", size = 110053, upload-time = "2025-07-29T07:43:07.204Z" }, + { url = "https://files.pythonhosted.org/packages/ac/62/3688c7d975ed195155671df68788c83fed6f7909b6ec4951724c6860cb97/mmh3-5.2.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c463d7c1c4cfc9d751efeaadd936bbba07b5b0ed81a012b3a9f5a12f0872bd6e", size = 97546, upload-time = "2025-07-29T07:43:08.226Z" }, + { url = "https://files.pythonhosted.org/packages/ca/3b/c6153250f03f71a8b7634cded82939546cdfba02e32f124ff51d52c6f991/mmh3-5.2.0-cp314-cp314-win32.whl", hash = "sha256:bb4fe46bdc6104fbc28db7a6bacb115ee6368ff993366bbd8a2a7f0076e6f0c0", size = 41422, upload-time = "2025-07-29T07:43:09.216Z" }, + { url = "https://files.pythonhosted.org/packages/74/01/a27d98bab083a435c4c07e9d1d720d4c8a578bf4c270bae373760b1022be/mmh3-5.2.0-cp314-cp314-win_amd64.whl", hash = "sha256:7c7f0b342fd06044bedd0b6e72177ddc0076f54fd89ee239447f8b271d919d9b", size = 42135, upload-time = "2025-07-29T07:43:10.183Z" }, + { url = "https://files.pythonhosted.org/packages/cb/c9/dbba5507e95429b8b380e2ba091eff5c20a70a59560934dff0ad8392b8c8/mmh3-5.2.0-cp314-cp314-win_arm64.whl", hash = "sha256:3193752fc05ea72366c2b63ff24b9a190f422e32d75fdeae71087c08fff26115", size = 39879, upload-time = "2025-07-29T07:43:11.106Z" }, + { url = "https://files.pythonhosted.org/packages/b5/d1/c8c0ef839c17258b9de41b84f663574fabcf8ac2007b7416575e0f65ff6e/mmh3-5.2.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:69fc339d7202bea69ef9bd7c39bfdf9fdabc8e6822a01eba62fb43233c1b3932", size = 57696, upload-time = "2025-07-29T07:43:11.989Z" }, + { url = "https://files.pythonhosted.org/packages/2f/55/95e2b9ff201e89f9fe37036037ab61a6c941942b25cdb7b6a9df9b931993/mmh3-5.2.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:12da42c0a55c9d86ab566395324213c319c73ecb0c239fad4726324212b9441c", size = 41421, upload-time = "2025-07-29T07:43:13.269Z" }, + { url = "https://files.pythonhosted.org/packages/77/79/9be23ad0b7001a4b22752e7693be232428ecc0a35068a4ff5c2f14ef8b20/mmh3-5.2.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f7f9034c7cf05ddfaac8d7a2e63a3c97a840d4615d0a0e65ba8bdf6f8576e3be", size = 40853, upload-time = "2025-07-29T07:43:14.888Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/1b/96b32058eda1c1dee8264900c37c359a7325c1f11f5ff14fd2be8e24eff9/mmh3-5.2.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:11730eeb16dfcf9674fdea9bb6b8e6dd9b40813b7eb839bc35113649eef38aeb", size = 109694, upload-time = "2025-07-29T07:43:15.816Z" }, + { url = "https://files.pythonhosted.org/packages/8d/6f/a2ae44cd7dad697b6dea48390cbc977b1e5ca58fda09628cbcb2275af064/mmh3-5.2.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:932a6eec1d2e2c3c9e630d10f7128d80e70e2d47fe6b8c7ea5e1afbd98733e65", size = 117438, upload-time = "2025-07-29T07:43:16.865Z" }, + { url = "https://files.pythonhosted.org/packages/a0/08/bfb75451c83f05224a28afeaf3950c7b793c0b71440d571f8e819cfb149a/mmh3-5.2.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ca975c51c5028947bbcfc24966517aac06a01d6c921e30f7c5383c195f87991", size = 120409, upload-time = "2025-07-29T07:43:18.207Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ea/8b118b69b2ff8df568f742387d1a159bc654a0f78741b31437dd047ea28e/mmh3-5.2.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5b0b58215befe0f0e120b828f7645e97719bbba9f23b69e268ed0ac7adde8645", size = 125909, upload-time = "2025-07-29T07:43:19.39Z" }, + { url = "https://files.pythonhosted.org/packages/3e/11/168cc0b6a30650032e351a3b89b8a47382da541993a03af91e1ba2501234/mmh3-5.2.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29c2b9ce61886809d0492a274a5a53047742dea0f703f9c4d5d223c3ea6377d3", size = 135331, upload-time = "2025-07-29T07:43:20.435Z" }, + { url = "https://files.pythonhosted.org/packages/31/05/e3a9849b1c18a7934c64e831492c99e67daebe84a8c2f2c39a7096a830e3/mmh3-5.2.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:a367d4741ac0103f8198c82f429bccb9359f543ca542b06a51f4f0332e8de279", size = 110085, upload-time = "2025-07-29T07:43:21.92Z" }, + { url = "https://files.pythonhosted.org/packages/d9/d5/a96bcc306e3404601418b2a9a370baec92af84204528ba659fdfe34c242f/mmh3-5.2.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:5a5dba98e514fb26241868f6eb90a7f7ca0e039aed779342965ce24ea32ba513", size = 111195, upload-time = "2025-07-29T07:43:23.066Z" }, + { url = "https://files.pythonhosted.org/packages/af/29/0fd49801fec5bff37198684e0849b58e0dab3a2a68382a357cfffb0fafc3/mmh3-5.2.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:941603bfd75a46023807511c1ac2f1b0f39cccc393c15039969806063b27e6db", size = 116919, upload-time = "2025-07-29T07:43:24.178Z" }, + { url = "https://files.pythonhosted.org/packages/2d/04/4f3c32b0a2ed762edca45d8b46568fc3668e34f00fb1e0a3b5451ec1281c/mmh3-5.2.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:132dd943451a7c7546978863d2f5a64977928410782e1a87d583cb60eb89e667", size = 123160, upload-time = "2025-07-29T07:43:25.26Z" }, + { url = "https://files.pythonhosted.org/packages/91/76/3d29eaa38821730633d6a240d36fa8ad2807e9dfd432c12e1a472ed211eb/mmh3-5.2.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f698733a8a494466432d611a8f0d1e026f5286dee051beea4b3c3146817e35d5", size = 110206, upload-time = "2025-07-29T07:43:26.699Z" }, + { url = "https://files.pythonhosted.org/packages/44/1c/ccf35892684d3a408202e296e56843743e0b4fb1629e59432ea88cdb3909/mmh3-5.2.0-cp314-cp314t-win32.whl", hash = "sha256:6d541038b3fc360ec538fc116de87462627944765a6750308118f8b509a8eec7", size = 41970, 
upload-time = "2025-07-29T07:43:27.666Z" }, + { url = "https://files.pythonhosted.org/packages/75/b2/b9e4f1e5adb5e21eb104588fcee2cd1eaa8308255173481427d5ecc4284e/mmh3-5.2.0-cp314-cp314t-win_amd64.whl", hash = "sha256:e912b19cf2378f2967d0c08e86ff4c6c360129887f678e27e4dde970d21b3f4d", size = 43063, upload-time = "2025-07-29T07:43:28.582Z" }, + { url = "https://files.pythonhosted.org/packages/6a/fc/0e61d9a4e29c8679356795a40e48f647b4aad58d71bfc969f0f8f56fb912/mmh3-5.2.0-cp314-cp314t-win_arm64.whl", hash = "sha256:e7884931fe5e788163e7b3c511614130c2c59feffdc21112290a194487efb2e9", size = 40455, upload-time = "2025-07-29T07:43:29.563Z" }, +] + +[[package]] +name = "mpmath" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106, upload-time = "2023-03-07T16:47:11.061Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, +] + +[[package]] +name = "msal" +version = "1.34.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "pyjwt", extra = ["crypto"] }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cf/0e/c857c46d653e104019a84f22d4494f2119b4fe9f896c92b4b864b3b045cc/msal-1.34.0.tar.gz", hash = "sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f", size = 153961, upload-time = "2025-09-22T23:05:48.989Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/dc/18d48843499e278538890dc709e9ee3dea8375f8be8e82682851df1b48b5/msal-1.34.0-py3-none-any.whl", hash = "sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1", size = 116987, upload-time = "2025-09-22T23:05:47.294Z" }, +] + +[[package]] +name = "msal-extensions" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "msal" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315, upload-time = "2025-03-14T23:51:03.902Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" }, +] + +[[package]] +name = "msgraph-core" +version = "1.3.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx", extra = ["http2"] }, + { name = "microsoft-kiota-abstractions" }, + { name = "microsoft-kiota-authentication-azure" }, + { name = "microsoft-kiota-http" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/4e/123f9530ec43b306c597bb830c62bedab830ffa76e0edf33ea88a26f756e/msgraph_core-1.3.8.tar.gz", hash = "sha256:6e883f9d4c4ad57501234749e07b010478c1a5f19550ef4cf005bbcac4a63ae7", size = 25506, upload-time = "2025-09-11T22:46:57.267Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/cf/4d/01432f60727ae452787014cad0d5bc9e035c6e11a670f12c23f7fc926d90/msgraph_core-1.3.8-py3-none-any.whl", hash = "sha256:86d83edcf62119946f201d13b7e857c947ef67addb088883940197081de85bea", size = 34473, upload-time = "2025-09-11T22:46:56.026Z" }, +] + +[[package]] +name = "msgraph-sdk" +version = "1.45.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-identity" }, + { name = "microsoft-kiota-serialization-form" }, + { name = "microsoft-kiota-serialization-json" }, + { name = "microsoft-kiota-serialization-multipart" }, + { name = "microsoft-kiota-serialization-text" }, + { name = "msgraph-core" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/af/70b6c875b9dc57ebe4ef315a2c7eed64ff9ea44e12de26b604a284b2fb60/msgraph_sdk-1.45.0.tar.gz", hash = "sha256:b23ab2260604ae85f17d668a93b79eebb1861c02b91e144bea32975b452f7709", size = 6050549, upload-time = "2025-09-16T20:42:40.078Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/30/10617d95c604101967300b9b68e7efa7bd3b57feda32522418e1bbfb2e52/msgraph_sdk-1.45.0-py3-none-any.whl", hash = "sha256:298daf1eaed5019c79ba85306c4e526502317ebbd1369a19ba53cee3c5bac09d", size = 24775024, upload-time = "2025-09-16T20:42:37.031Z" }, +] + +[[package]] +name = "multidict" +version = "6.6.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/7f/0652e6ed47ab288e3756ea9c0df8b14950781184d4bd7883f4d87dd41245/multidict-6.6.4.tar.gz", hash = "sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd", size = 101843, upload-time = "2025-08-11T12:08:48.217Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/7f/90a7f01e2d005d6653c689039977f6856718c75c5579445effb7e60923d1/multidict-6.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c", size = 76472, upload-time = "2025-08-11T12:06:29.006Z" }, + { url = "https://files.pythonhosted.org/packages/54/a3/bed07bc9e2bb302ce752f1dabc69e884cd6a676da44fb0e501b246031fdd/multidict-6.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb", size = 44634, upload-time = "2025-08-11T12:06:30.374Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4b/ceeb4f8f33cf81277da464307afeaf164fb0297947642585884f5cad4f28/multidict-6.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e", size = 44282, upload-time = "2025-08-11T12:06:31.958Z" }, + { url = "https://files.pythonhosted.org/packages/03/35/436a5da8702b06866189b69f655ffdb8f70796252a8772a77815f1812679/multidict-6.6.4-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded", size = 229696, upload-time = "2025-08-11T12:06:33.087Z" }, + { url = "https://files.pythonhosted.org/packages/b6/0e/915160be8fecf1fca35f790c08fb74ca684d752fcba62c11daaf3d92c216/multidict-6.6.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683", size = 246665, upload-time = "2025-08-11T12:06:34.448Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/ee/2f464330acd83f77dcc346f0b1a0eaae10230291450887f96b204b8ac4d3/multidict-6.6.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a", size = 225485, upload-time = "2025-08-11T12:06:35.672Z" }, + { url = "https://files.pythonhosted.org/packages/71/cc/9a117f828b4d7fbaec6adeed2204f211e9caf0a012692a1ee32169f846ae/multidict-6.6.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9", size = 257318, upload-time = "2025-08-11T12:06:36.98Z" }, + { url = "https://files.pythonhosted.org/packages/25/77/62752d3dbd70e27fdd68e86626c1ae6bccfebe2bb1f84ae226363e112f5a/multidict-6.6.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50", size = 254689, upload-time = "2025-08-11T12:06:38.233Z" }, + { url = "https://files.pythonhosted.org/packages/00/6e/fac58b1072a6fc59af5e7acb245e8754d3e1f97f4f808a6559951f72a0d4/multidict-6.6.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52", size = 246709, upload-time = "2025-08-11T12:06:39.517Z" }, + { url = "https://files.pythonhosted.org/packages/01/ef/4698d6842ef5e797c6db7744b0081e36fb5de3d00002cc4c58071097fac3/multidict-6.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6", size = 243185, upload-time = "2025-08-11T12:06:40.796Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c9/d82e95ae1d6e4ef396934e9b0e942dfc428775f9554acf04393cce66b157/multidict-6.6.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e", size = 237838, upload-time = "2025-08-11T12:06:42.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/cf/f94af5c36baaa75d44fab9f02e2a6bcfa0cd90acb44d4976a80960759dbc/multidict-6.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3", size = 246368, upload-time = "2025-08-11T12:06:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/4a/fe/29f23460c3d995f6a4b678cb2e9730e7277231b981f0b234702f0177818a/multidict-6.6.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c", size = 253339, upload-time = "2025-08-11T12:06:45.597Z" }, + { url = "https://files.pythonhosted.org/packages/29/b6/fd59449204426187b82bf8a75f629310f68c6adc9559dc922d5abe34797b/multidict-6.6.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b", size = 246933, upload-time = "2025-08-11T12:06:46.841Z" }, + { url = "https://files.pythonhosted.org/packages/19/52/d5d6b344f176a5ac3606f7a61fb44dc746e04550e1a13834dff722b8d7d6/multidict-6.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f", size = 242225, upload-time = "2025-08-11T12:06:48.588Z" }, + { url = "https://files.pythonhosted.org/packages/ec/d3/5b2281ed89ff4d5318d82478a2a2450fcdfc3300da48ff15c1778280ad26/multidict-6.6.4-cp311-cp311-win32.whl", hash = "sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2", size = 
41306, upload-time = "2025-08-11T12:06:49.95Z" }, + { url = "https://files.pythonhosted.org/packages/74/7d/36b045c23a1ab98507aefd44fd8b264ee1dd5e5010543c6fccf82141ccef/multidict-6.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e", size = 46029, upload-time = "2025-08-11T12:06:51.082Z" }, + { url = "https://files.pythonhosted.org/packages/0f/5e/553d67d24432c5cd52b49047f2d248821843743ee6d29a704594f656d182/multidict-6.6.4-cp311-cp311-win_arm64.whl", hash = "sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf", size = 43017, upload-time = "2025-08-11T12:06:52.243Z" }, + { url = "https://files.pythonhosted.org/packages/05/f6/512ffd8fd8b37fb2680e5ac35d788f1d71bbaf37789d21a820bdc441e565/multidict-6.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8", size = 76516, upload-time = "2025-08-11T12:06:53.393Z" }, + { url = "https://files.pythonhosted.org/packages/99/58/45c3e75deb8855c36bd66cc1658007589662ba584dbf423d01df478dd1c5/multidict-6.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3", size = 45394, upload-time = "2025-08-11T12:06:54.555Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/e8c4472a93a26e4507c0b8e1f0762c0d8a32de1328ef72fd704ef9cc5447/multidict-6.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b", size = 43591, upload-time = "2025-08-11T12:06:55.672Z" }, + { url = "https://files.pythonhosted.org/packages/05/51/edf414f4df058574a7265034d04c935aa84a89e79ce90fcf4df211f47b16/multidict-6.6.4-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287", size = 237215, upload-time = "2025-08-11T12:06:57.213Z" }, + { url = "https://files.pythonhosted.org/packages/c8/45/8b3d6dbad8cf3252553cc41abea09ad527b33ce47a5e199072620b296902/multidict-6.6.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138", size = 258299, upload-time = "2025-08-11T12:06:58.946Z" }, + { url = "https://files.pythonhosted.org/packages/3c/e8/8ca2e9a9f5a435fc6db40438a55730a4bf4956b554e487fa1b9ae920f825/multidict-6.6.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6", size = 242357, upload-time = "2025-08-11T12:07:00.301Z" }, + { url = "https://files.pythonhosted.org/packages/0f/84/80c77c99df05a75c28490b2af8f7cba2a12621186e0a8b0865d8e745c104/multidict-6.6.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9", size = 268369, upload-time = "2025-08-11T12:07:01.638Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e9/920bfa46c27b05fb3e1ad85121fd49f441492dca2449c5bcfe42e4565d8a/multidict-6.6.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c", size = 269341, upload-time = "2025-08-11T12:07:02.943Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/65/753a2d8b05daf496f4a9c367fe844e90a1b2cac78e2be2c844200d10cc4c/multidict-6.6.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402", size = 256100, upload-time = "2025-08-11T12:07:04.564Z" }, + { url = "https://files.pythonhosted.org/packages/09/54/655be13ae324212bf0bc15d665a4e34844f34c206f78801be42f7a0a8aaa/multidict-6.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7", size = 253584, upload-time = "2025-08-11T12:07:05.914Z" }, + { url = "https://files.pythonhosted.org/packages/5c/74/ab2039ecc05264b5cec73eb018ce417af3ebb384ae9c0e9ed42cb33f8151/multidict-6.6.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f", size = 251018, upload-time = "2025-08-11T12:07:08.301Z" }, + { url = "https://files.pythonhosted.org/packages/af/0a/ccbb244ac848e56c6427f2392741c06302bbfba49c0042f1eb3c5b606497/multidict-6.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d", size = 251477, upload-time = "2025-08-11T12:07:10.248Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b0/0ed49bba775b135937f52fe13922bc64a7eaf0a3ead84a36e8e4e446e096/multidict-6.6.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7", size = 263575, upload-time = "2025-08-11T12:07:11.928Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d9/7fb85a85e14de2e44dfb6a24f03c41e2af8697a6df83daddb0e9b7569f73/multidict-6.6.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802", size = 259649, upload-time = "2025-08-11T12:07:13.244Z" }, + { url = "https://files.pythonhosted.org/packages/03/9e/b3a459bcf9b6e74fa461a5222a10ff9b544cb1cd52fd482fb1b75ecda2a2/multidict-6.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24", size = 251505, upload-time = "2025-08-11T12:07:14.57Z" }, + { url = "https://files.pythonhosted.org/packages/86/a2/8022f78f041dfe6d71e364001a5cf987c30edfc83c8a5fb7a3f0974cff39/multidict-6.6.4-cp312-cp312-win32.whl", hash = "sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793", size = 41888, upload-time = "2025-08-11T12:07:15.904Z" }, + { url = "https://files.pythonhosted.org/packages/c7/eb/d88b1780d43a56db2cba24289fa744a9d216c1a8546a0dc3956563fd53ea/multidict-6.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e", size = 46072, upload-time = "2025-08-11T12:07:17.045Z" }, + { url = "https://files.pythonhosted.org/packages/9f/16/b929320bf5750e2d9d4931835a4c638a19d2494a5b519caaaa7492ebe105/multidict-6.6.4-cp312-cp312-win_arm64.whl", hash = "sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364", size = 43222, upload-time = "2025-08-11T12:07:18.328Z" }, + { url = "https://files.pythonhosted.org/packages/3a/5d/e1db626f64f60008320aab00fbe4f23fc3300d75892a3381275b3d284580/multidict-6.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f46a6e8597f9bd71b31cc708195d42b634c8527fecbcf93febf1052cacc1f16e", size = 75848, upload-time = "2025-08-11T12:07:19.912Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/aa/8b6f548d839b6c13887253af4e29c939af22a18591bfb5d0ee6f1931dae8/multidict-6.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:22e38b2bc176c5eb9c0a0e379f9d188ae4cd8b28c0f53b52bce7ab0a9e534657", size = 45060, upload-time = "2025-08-11T12:07:21.163Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c6/f5e97e5d99a729bc2aa58eb3ebfa9f1e56a9b517cc38c60537c81834a73f/multidict-6.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5df8afd26f162da59e218ac0eefaa01b01b2e6cd606cffa46608f699539246da", size = 43269, upload-time = "2025-08-11T12:07:22.392Z" }, + { url = "https://files.pythonhosted.org/packages/dc/31/d54eb0c62516776f36fe67f84a732f97e0b0e12f98d5685bebcc6d396910/multidict-6.6.4-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:49517449b58d043023720aa58e62b2f74ce9b28f740a0b5d33971149553d72aa", size = 237158, upload-time = "2025-08-11T12:07:23.636Z" }, + { url = "https://files.pythonhosted.org/packages/c4/1c/8a10c1c25b23156e63b12165a929d8eb49a6ed769fdbefb06e6f07c1e50d/multidict-6.6.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae9408439537c5afdca05edd128a63f56a62680f4b3c234301055d7a2000220f", size = 257076, upload-time = "2025-08-11T12:07:25.049Z" }, + { url = "https://files.pythonhosted.org/packages/ad/86/90e20b5771d6805a119e483fd3d1e8393e745a11511aebca41f0da38c3e2/multidict-6.6.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87a32d20759dc52a9e850fe1061b6e41ab28e2998d44168a8a341b99ded1dba0", size = 240694, upload-time = "2025-08-11T12:07:26.458Z" }, + { url = "https://files.pythonhosted.org/packages/e7/49/484d3e6b535bc0555b52a0a26ba86e4d8d03fd5587d4936dc59ba7583221/multidict-6.6.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52e3c8d43cdfff587ceedce9deb25e6ae77daba560b626e97a56ddcad3756879", size = 266350, upload-time = "2025-08-11T12:07:27.94Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b4/aa4c5c379b11895083d50021e229e90c408d7d875471cb3abf721e4670d6/multidict-6.6.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ad8850921d3a8d8ff6fbef790e773cecfc260bbfa0566998980d3fa8f520bc4a", size = 267250, upload-time = "2025-08-11T12:07:29.303Z" }, + { url = "https://files.pythonhosted.org/packages/80/e5/5e22c5bf96a64bdd43518b1834c6d95a4922cc2066b7d8e467dae9b6cee6/multidict-6.6.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:497a2954adc25c08daff36f795077f63ad33e13f19bfff7736e72c785391534f", size = 254900, upload-time = "2025-08-11T12:07:30.764Z" }, + { url = "https://files.pythonhosted.org/packages/17/38/58b27fed927c07035abc02befacab42491e7388ca105e087e6e0215ead64/multidict-6.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:024ce601f92d780ca1617ad4be5ac15b501cc2414970ffa2bb2bbc2bd5a68fa5", size = 252355, upload-time = "2025-08-11T12:07:32.205Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a1/dad75d23a90c29c02b5d6f3d7c10ab36c3197613be5d07ec49c7791e186c/multidict-6.6.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a693fc5ed9bdd1c9e898013e0da4dcc640de7963a371c0bd458e50e046bf6438", size = 250061, upload-time = "2025-08-11T12:07:33.623Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/1a/ac2216b61c7f116edab6dc3378cca6c70dc019c9a457ff0d754067c58b20/multidict-6.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:190766dac95aab54cae5b152a56520fd99298f32a1266d66d27fdd1b5ac00f4e", size = 249675, upload-time = "2025-08-11T12:07:34.958Z" }, + { url = "https://files.pythonhosted.org/packages/d4/79/1916af833b800d13883e452e8e0977c065c4ee3ab7a26941fbfdebc11895/multidict-6.6.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:34d8f2a5ffdceab9dcd97c7a016deb2308531d5f0fced2bb0c9e1df45b3363d7", size = 261247, upload-time = "2025-08-11T12:07:36.588Z" }, + { url = "https://files.pythonhosted.org/packages/c5/65/d1f84fe08ac44a5fc7391cbc20a7cedc433ea616b266284413fd86062f8c/multidict-6.6.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:59e8d40ab1f5a8597abcef00d04845155a5693b5da00d2c93dbe88f2050f2812", size = 257960, upload-time = "2025-08-11T12:07:39.735Z" }, + { url = "https://files.pythonhosted.org/packages/13/b5/29ec78057d377b195ac2c5248c773703a6b602e132a763e20ec0457e7440/multidict-6.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:467fe64138cfac771f0e949b938c2e1ada2b5af22f39692aa9258715e9ea613a", size = 250078, upload-time = "2025-08-11T12:07:41.525Z" }, + { url = "https://files.pythonhosted.org/packages/c4/0e/7e79d38f70a872cae32e29b0d77024bef7834b0afb406ddae6558d9e2414/multidict-6.6.4-cp313-cp313-win32.whl", hash = "sha256:14616a30fe6d0a48d0a48d1a633ab3b8bec4cf293aac65f32ed116f620adfd69", size = 41708, upload-time = "2025-08-11T12:07:43.405Z" }, + { url = "https://files.pythonhosted.org/packages/9d/34/746696dffff742e97cd6a23da953e55d0ea51fa601fa2ff387b3edcfaa2c/multidict-6.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:40cd05eaeb39e2bc8939451f033e57feaa2ac99e07dbca8afe2be450a4a3b6cf", size = 45912, upload-time = "2025-08-11T12:07:45.082Z" }, + { url = "https://files.pythonhosted.org/packages/c7/87/3bac136181e271e29170d8d71929cdeddeb77f3e8b6a0c08da3a8e9da114/multidict-6.6.4-cp313-cp313-win_arm64.whl", hash = "sha256:f6eb37d511bfae9e13e82cb4d1af36b91150466f24d9b2b8a9785816deb16605", size = 43076, upload-time = "2025-08-11T12:07:46.746Z" }, + { url = "https://files.pythonhosted.org/packages/64/94/0a8e63e36c049b571c9ae41ee301ada29c3fee9643d9c2548d7d558a1d99/multidict-6.6.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:6c84378acd4f37d1b507dfa0d459b449e2321b3ba5f2338f9b085cf7a7ba95eb", size = 82812, upload-time = "2025-08-11T12:07:48.402Z" }, + { url = "https://files.pythonhosted.org/packages/25/1a/be8e369dfcd260d2070a67e65dd3990dd635cbd735b98da31e00ea84cd4e/multidict-6.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0e0558693063c75f3d952abf645c78f3c5dfdd825a41d8c4d8156fc0b0da6e7e", size = 48313, upload-time = "2025-08-11T12:07:49.679Z" }, + { url = "https://files.pythonhosted.org/packages/26/5a/dd4ade298674b2f9a7b06a32c94ffbc0497354df8285f27317c66433ce3b/multidict-6.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3f8e2384cb83ebd23fd07e9eada8ba64afc4c759cd94817433ab8c81ee4b403f", size = 46777, upload-time = "2025-08-11T12:07:51.318Z" }, + { url = "https://files.pythonhosted.org/packages/89/db/98aa28bc7e071bfba611ac2ae803c24e96dd3a452b4118c587d3d872c64c/multidict-6.6.4-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f996b87b420995a9174b2a7c1a8daf7db4750be6848b03eb5e639674f7963773", size = 229321, upload-time = "2025-08-11T12:07:52.965Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/bc/01ddda2a73dd9d167bd85d0e8ef4293836a8f82b786c63fb1a429bc3e678/multidict-6.6.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc356250cffd6e78416cf5b40dc6a74f1edf3be8e834cf8862d9ed5265cf9b0e", size = 249954, upload-time = "2025-08-11T12:07:54.423Z" }, + { url = "https://files.pythonhosted.org/packages/06/78/6b7c0f020f9aa0acf66d0ab4eb9f08375bac9a50ff5e3edb1c4ccd59eafc/multidict-6.6.4-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:dadf95aa862714ea468a49ad1e09fe00fcc9ec67d122f6596a8d40caf6cec7d0", size = 228612, upload-time = "2025-08-11T12:07:55.914Z" }, + { url = "https://files.pythonhosted.org/packages/00/44/3faa416f89b2d5d76e9d447296a81521e1c832ad6e40b92f990697b43192/multidict-6.6.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7dd57515bebffd8ebd714d101d4c434063322e4fe24042e90ced41f18b6d3395", size = 257528, upload-time = "2025-08-11T12:07:57.371Z" }, + { url = "https://files.pythonhosted.org/packages/05/5f/77c03b89af0fcb16f018f668207768191fb9dcfb5e3361a5e706a11db2c9/multidict-6.6.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:967af5f238ebc2eb1da4e77af5492219fbd9b4b812347da39a7b5f5c72c0fa45", size = 256329, upload-time = "2025-08-11T12:07:58.844Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e9/ed750a2a9afb4f8dc6f13dc5b67b514832101b95714f1211cd42e0aafc26/multidict-6.6.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a4c6875c37aae9794308ec43e3530e4aa0d36579ce38d89979bbf89582002bb", size = 247928, upload-time = "2025-08-11T12:08:01.037Z" }, + { url = "https://files.pythonhosted.org/packages/1f/b5/e0571bc13cda277db7e6e8a532791d4403dacc9850006cb66d2556e649c0/multidict-6.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f683a551e92bdb7fac545b9c6f9fa2aebdeefa61d607510b3533286fcab67f5", size = 245228, upload-time = "2025-08-11T12:08:02.96Z" }, + { url = "https://files.pythonhosted.org/packages/f3/a3/69a84b0eccb9824491f06368f5b86e72e4af54c3067c37c39099b6687109/multidict-6.6.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:3ba5aaf600edaf2a868a391779f7a85d93bed147854925f34edd24cc70a3e141", size = 235869, upload-time = "2025-08-11T12:08:04.746Z" }, + { url = "https://files.pythonhosted.org/packages/a9/9d/28802e8f9121a6a0804fa009debf4e753d0a59969ea9f70be5f5fdfcb18f/multidict-6.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:580b643b7fd2c295d83cad90d78419081f53fd532d1f1eb67ceb7060f61cff0d", size = 243446, upload-time = "2025-08-11T12:08:06.332Z" }, + { url = "https://files.pythonhosted.org/packages/38/ea/6c98add069b4878c1d66428a5f5149ddb6d32b1f9836a826ac764b9940be/multidict-6.6.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:37b7187197da6af3ee0b044dbc9625afd0c885f2800815b228a0e70f9a7f473d", size = 252299, upload-time = "2025-08-11T12:08:07.931Z" }, + { url = "https://files.pythonhosted.org/packages/3a/09/8fe02d204473e14c0af3affd50af9078839dfca1742f025cca765435d6b4/multidict-6.6.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e1b93790ed0bc26feb72e2f08299691ceb6da5e9e14a0d13cc74f1869af327a0", size = 246926, upload-time = "2025-08-11T12:08:09.467Z" }, + { url = "https://files.pythonhosted.org/packages/37/3d/7b1e10d774a6df5175ecd3c92bff069e77bed9ec2a927fdd4ff5fe182f67/multidict-6.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:a506a77ddee1efcca81ecbeae27ade3e09cdf21a8ae854d766c2bb4f14053f92", size = 243383, upload-time = "2025-08-11T12:08:10.981Z" }, + { url = "https://files.pythonhosted.org/packages/50/b0/a6fae46071b645ae98786ab738447de1ef53742eaad949f27e960864bb49/multidict-6.6.4-cp313-cp313t-win32.whl", hash = "sha256:f93b2b2279883d1d0a9e1bd01f312d6fc315c5e4c1f09e112e4736e2f650bc4e", size = 47775, upload-time = "2025-08-11T12:08:12.439Z" }, + { url = "https://files.pythonhosted.org/packages/b2/0a/2436550b1520091af0600dff547913cb2d66fbac27a8c33bc1b1bccd8d98/multidict-6.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:6d46a180acdf6e87cc41dc15d8f5c2986e1e8739dc25dbb7dac826731ef381a4", size = 53100, upload-time = "2025-08-11T12:08:13.823Z" }, + { url = "https://files.pythonhosted.org/packages/97/ea/43ac51faff934086db9c072a94d327d71b7d8b40cd5dcb47311330929ef0/multidict-6.6.4-cp313-cp313t-win_arm64.whl", hash = "sha256:756989334015e3335d087a27331659820d53ba432befdef6a718398b0a8493ad", size = 45501, upload-time = "2025-08-11T12:08:15.173Z" }, + { url = "https://files.pythonhosted.org/packages/fd/69/b547032297c7e63ba2af494edba695d781af8a0c6e89e4d06cf848b21d80/multidict-6.6.4-py3-none-any.whl", hash = "sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c", size = 12313, upload-time = "2025-08-11T12:08:46.891Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "networkx" +version = "3.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/4f/ccdb8ad3a38e583f214547fd2f7ff1fc160c43a75af88e6aec213404b96a/networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037", size = 2471065, upload-time = "2025-05-29T11:35:07.804Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/8d/776adee7bbf76365fdd7f2552710282c79a4ead5d2a46408c9043a2b70ba/networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec", size = 2034406, upload-time = "2025-05-29T11:35:04.961Z" }, +] + +[[package]] +name = "nltk" +version = "3.9.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "joblib" }, + { name = "regex" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f9/76/3a5e4312c19a028770f86fd7c058cf9f4ec4321c6cf7526bab998a5b683c/nltk-3.9.2.tar.gz", hash = "sha256:0f409e9b069ca4177c1903c3e843eef90c7e92992fa4931ae607da6de49e1419", size = 2887629, upload-time = "2025-10-01T07:19:23.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/90/81ac364ef94209c100e12579629dc92bf7a709a84af32f8c551b02c07e94/nltk-3.9.2-py3-none-any.whl", hash = "sha256:1e209d2b3009110635ed9709a67a1a3e33a10f799490fa71cf4bec218c11c88a", size = 1513404, upload-time = "2025-10-01T07:19:21.648Z" }, +] + 
+[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + +[[package]] +name = "numba" +version = "0.61.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "llvmlite" }, + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/a0/e21f57604304aa03ebb8e098429222722ad99176a4f979d34af1d1ee80da/numba-0.61.2.tar.gz", hash = "sha256:8750ee147940a6637b80ecf7f95062185ad8726c8c28a2295b8ec1160a196f7d", size = 2820615, upload-time = "2025-04-09T02:58:07.659Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/97/c99d1056aed767503c228f7099dc11c402906b42a4757fec2819329abb98/numba-0.61.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:efd3db391df53aaa5cfbee189b6c910a5b471488749fd6606c3f33fc984c2ae2", size = 2775825, upload-time = "2025-04-09T02:57:43.442Z" }, + { url = "https://files.pythonhosted.org/packages/95/9e/63c549f37136e892f006260c3e2613d09d5120672378191f2dc387ba65a2/numba-0.61.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:49c980e4171948ffebf6b9a2520ea81feed113c1f4890747ba7f59e74be84b1b", size = 2778695, upload-time = "2025-04-09T02:57:44.968Z" }, + { url = "https://files.pythonhosted.org/packages/97/c8/8740616c8436c86c1b9a62e72cb891177d2c34c2d24ddcde4c390371bf4c/numba-0.61.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3945615cd73c2c7eba2a85ccc9c1730c21cd3958bfcf5a44302abae0fb07bb60", size = 3829227, upload-time = "2025-04-09T02:57:46.63Z" }, + { url = "https://files.pythonhosted.org/packages/fc/06/66e99ae06507c31d15ff3ecd1f108f2f59e18b6e08662cd5f8a5853fbd18/numba-0.61.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbfdf4eca202cebade0b7d43896978e146f39398909a42941c9303f82f403a18", size = 3523422, upload-time = "2025-04-09T02:57:48.222Z" }, + { url = "https://files.pythonhosted.org/packages/0f/a4/2b309a6a9f6d4d8cfba583401c7c2f9ff887adb5d54d8e2e130274c0973f/numba-0.61.2-cp311-cp311-win_amd64.whl", hash = "sha256:76bcec9f46259cedf888041b9886e257ae101c6268261b19fda8cfbc52bec9d1", size = 2831505, upload-time = "2025-04-09T02:57:50.108Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a0/c6b7b9c615cfa3b98c4c63f4316e3f6b3bbe2387740277006551784218cd/numba-0.61.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:34fba9406078bac7ab052efbf0d13939426c753ad72946baaa5bf9ae0ebb8dd2", size = 2776626, upload-time = "2025-04-09T02:57:51.857Z" }, + { url = "https://files.pythonhosted.org/packages/92/4a/fe4e3c2ecad72d88f5f8cd04e7f7cff49e718398a2fac02d2947480a00ca/numba-0.61.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ddce10009bc097b080fc96876d14c051cc0c7679e99de3e0af59014dab7dfe8", size = 2779287, upload-time = "2025-04-09T02:57:53.658Z" }, + { url = "https://files.pythonhosted.org/packages/9a/2d/e518df036feab381c23a624dac47f8445ac55686ec7f11083655eb707da3/numba-0.61.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:5b1bb509d01f23d70325d3a5a0e237cbc9544dd50e50588bc581ba860c213546", size = 3885928, upload-time = "2025-04-09T02:57:55.206Z" }, + { url = "https://files.pythonhosted.org/packages/10/0f/23cced68ead67b75d77cfcca3df4991d1855c897ee0ff3fe25a56ed82108/numba-0.61.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48a53a3de8f8793526cbe330f2a39fe9a6638efcbf11bd63f3d2f9757ae345cd", size = 3577115, upload-time = "2025-04-09T02:57:56.818Z" }, + { url = "https://files.pythonhosted.org/packages/68/1d/ddb3e704c5a8fb90142bf9dc195c27db02a08a99f037395503bfbc1d14b3/numba-0.61.2-cp312-cp312-win_amd64.whl", hash = "sha256:97cf4f12c728cf77c9c1d7c23707e4d8fb4632b46275f8f3397de33e5877af18", size = 2831929, upload-time = "2025-04-09T02:57:58.45Z" }, + { url = "https://files.pythonhosted.org/packages/0b/f3/0fe4c1b1f2569e8a18ad90c159298d862f96c3964392a20d74fc628aee44/numba-0.61.2-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:3a10a8fc9afac40b1eac55717cece1b8b1ac0b946f5065c89e00bde646b5b154", size = 2771785, upload-time = "2025-04-09T02:57:59.96Z" }, + { url = "https://files.pythonhosted.org/packages/e9/71/91b277d712e46bd5059f8a5866862ed1116091a7cb03bd2704ba8ebe015f/numba-0.61.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d3bcada3c9afba3bed413fba45845f2fb9cd0d2b27dd58a1be90257e293d140", size = 2773289, upload-time = "2025-04-09T02:58:01.435Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e0/5ea04e7ad2c39288c0f0f9e8d47638ad70f28e275d092733b5817cf243c9/numba-0.61.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bdbca73ad81fa196bd53dc12e3aaf1564ae036e0c125f237c7644fe64a4928ab", size = 3893918, upload-time = "2025-04-09T02:58:02.933Z" }, + { url = "https://files.pythonhosted.org/packages/17/58/064f4dcb7d7e9412f16ecf80ed753f92297e39f399c905389688cf950b81/numba-0.61.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:5f154aaea625fb32cfbe3b80c5456d514d416fcdf79733dd69c0df3a11348e9e", size = 3584056, upload-time = "2025-04-09T02:58:04.538Z" }, + { url = "https://files.pythonhosted.org/packages/af/a4/6d3a0f2d3989e62a18749e1e9913d5fa4910bbb3e3311a035baea6caf26d/numba-0.61.2-cp313-cp313-win_amd64.whl", hash = "sha256:59321215e2e0ac5fa928a8020ab00b8e57cda8a97384963ac0dfa4d4e6aa54e7", size = 2831846, upload-time = "2025-04-09T02:58:06.125Z" }, +] + +[[package]] +name = "numpy" +version = "1.26.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554, upload-time = "2024-02-05T23:51:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127, upload-time = "2024-02-05T23:52:15.314Z" }, + { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994, upload-time = "2024-02-05T23:52:47.569Z" }, + { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005, upload-time = "2024-02-05T23:53:15.637Z" }, + { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297, upload-time = "2024-02-05T23:53:42.16Z" }, + { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567, upload-time = "2024-02-05T23:54:11.696Z" }, + { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812, upload-time = "2024-02-05T23:54:26.453Z" }, + { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913, upload-time = "2024-02-05T23:54:53.933Z" }, + { url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901, upload-time = "2024-02-05T23:55:32.801Z" }, + { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868, upload-time = "2024-02-05T23:55:56.28Z" }, + { url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109, upload-time = "2024-02-05T23:56:20.368Z" }, + { url = "https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", size = 17950613, upload-time = "2024-02-05T23:56:56.054Z" }, + { url = "https://files.pythonhosted.org/packages/4c/0c/9c603826b6465e82591e05ca230dfc13376da512b25ccd0894709b054ed0/numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", size = 13572172, upload-time = "2024-02-05T23:57:21.56Z" }, + { url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", 
size = 17786643, upload-time = "2024-02-05T23:57:56.585Z" }, + { url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 5677803, upload-time = "2024-02-05T23:58:08.963Z" }, + { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754, upload-time = "2024-02-05T23:58:36.364Z" }, +] + +[[package]] +name = "nvidia-cublas-cu12" +version = "12.8.4.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/61/e24b560ab2e2eaeb3c839129175fb330dfcfc29e5203196e5541a4c44682/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:8ac4e771d5a348c551b2a426eda6193c19aa630236b418086020df5ba9667142", size = 594346921, upload-time = "2025-03-07T01:44:31.254Z" }, +] + +[[package]] +name = "nvidia-cuda-cupti-cu12" +version = "12.8.90" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/02/2adcaa145158bf1a8295d83591d22e4103dbfd821bcaf6f3f53151ca4ffa/nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea0cb07ebda26bb9b29ba82cda34849e73c166c18162d3913575b0c9db9a6182", size = 10248621, upload-time = "2025-03-07T01:40:21.213Z" }, +] + +[[package]] +name = "nvidia-cuda-nvrtc-cu12" +version = "12.8.93" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/6b/32f747947df2da6994e999492ab306a903659555dddc0fbdeb9d71f75e52/nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:a7756528852ef889772a84c6cd89d41dfa74667e24cca16bb31f8f061e3e9994", size = 88040029, upload-time = "2025-03-07T01:42:13.562Z" }, +] + +[[package]] +name = "nvidia-cuda-runtime-cu12" +version = "12.8.90" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/9b/a997b638fcd068ad6e4d53b8551a7d30fe8b404d6f1804abf1df69838932/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adade8dcbd0edf427b7204d480d6066d33902cab2a4707dcfc48a2d0fd44ab90", size = 954765, upload-time = "2025-03-07T01:40:01.615Z" }, +] + +[[package]] +name = "nvidia-cudnn-cu12" +version = "9.10.2.21" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-cublas-cu12", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/51/e123d997aa098c61d029f76663dedbfb9bc8dcf8c60cbd6adbe42f76d049/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:949452be657fa16687d0930933f032835951ef0892b37d2d53824d1a84dc97a8", size = 706758467, upload-time = "2025-06-06T21:54:08.597Z" }, +] + +[[package]] +name = "nvidia-cufft-cu12" +version = "11.3.3.83" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-nvjitlink-cu12", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, +] +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/1f/13/ee4e00f30e676b66ae65b4f08cb5bcbb8392c03f54f2d5413ea99a5d1c80/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d2dd21ec0b88cf61b62e6b43564355e5222e4a3fb394cac0db101f2dd0d4f74", size = 193118695, upload-time = "2025-03-07T01:45:27.821Z" }, +] + +[[package]] +name = "nvidia-cufile-cu12" +version = "1.13.1.3" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bb/fe/1bcba1dfbfb8d01be8d93f07bfc502c93fa23afa6fd5ab3fc7c1df71038a/nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1d069003be650e131b21c932ec3d8969c1715379251f8d23a1860554b1cb24fc", size = 1197834, upload-time = "2025-03-07T01:45:50.723Z" }, +] + +[[package]] +name = "nvidia-curand-cu12" +version = "10.3.9.90" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/aa/6584b56dc84ebe9cf93226a5cde4d99080c8e90ab40f0c27bda7a0f29aa1/nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:b32331d4f4df5d6eefa0554c565b626c7216f87a06a4f56fab27c3b68a830ec9", size = 63619976, upload-time = "2025-03-07T01:46:23.323Z" }, +] + +[[package]] +name = "nvidia-cusolver-cu12" +version = "11.7.3.90" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-cublas-cu12", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, + { name = "nvidia-cusparse-cu12", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, + { name = "nvidia-nvjitlink-cu12", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/48/9a13d2975803e8cf2777d5ed57b87a0b6ca2cc795f9a4f59796a910bfb80/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:4376c11ad263152bd50ea295c05370360776f8c3427b30991df774f9fb26c450", size = 267506905, upload-time = "2025-03-07T01:47:16.273Z" }, +] + +[[package]] +name = "nvidia-cusparse-cu12" +version = "12.5.8.93" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-nvjitlink-cu12", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/f5/e1854cb2f2bcd4280c44736c93550cc300ff4b8c95ebe370d0aa7d2b473d/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ec05d76bbbd8b61b06a80e1eaf8cf4959c3d4ce8e711b65ebd0443bb0ebb13b", size = 288216466, upload-time = "2025-03-07T01:48:13.779Z" }, +] + +[[package]] +name = "nvidia-cusparselt-cu12" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/79/12978b96bd44274fe38b5dde5cfb660b1d114f70a65ef962bcbbed99b549/nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f1bb701d6b930d5a7cea44c19ceb973311500847f81b634d802b7b539dc55623", size = 287193691, upload-time = "2025-02-26T00:15:44.104Z" }, +] + +[[package]] +name = "nvidia-nccl-cu12" +version = "2.27.3" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/5c/5b/4e4fff7bad39adf89f735f2bc87248c81db71205b62bcc0d5ca5b606b3c3/nvidia_nccl_cu12-2.27.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adf27ccf4238253e0b826bce3ff5fa532d65fc42322c8bfdfaf28024c0fbe039", size = 322364134, upload-time = "2025-06-03T21:58:04.013Z" }, +] + +[[package]] +name = "nvidia-nvjitlink-cu12" +version = "12.8.93" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/74/86a07f1d0f42998ca31312f998bd3b9a7eff7f52378f4f270c8679c77fb9/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:81ff63371a7ebd6e6451970684f916be2eab07321b73c9d244dc2b4da7f73b88", size = 39254836, upload-time = "2025-03-07T01:49:55.661Z" }, +] + +[[package]] +name = "nvidia-nvtx-cu12" +version = "12.8.90" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/eb/86626c1bbc2edb86323022371c39aa48df6fd8b0a1647bc274577f72e90b/nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b17e2001cc0d751a5bc2c6ec6d26ad95913324a4adb86788c944f8ce9ba441f", size = 89954, upload-time = "2025-03-07T01:42:44.131Z" }, +] + +[[package]] +name = "olefile" +version = "0.47" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/1b/077b508e3e500e1629d366249c3ccb32f95e50258b231705c09e3c7a4366/olefile-0.47.zip", hash = "sha256:599383381a0bf3dfbd932ca0ca6515acd174ed48870cbf7fee123d698c192c1c", size = 112240, upload-time = "2023-12-01T16:22:53.025Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/d3/b64c356a907242d719fc668b71befd73324e47ab46c8ebbbede252c154b2/olefile-0.47-py2.py3-none-any.whl", hash = "sha256:543c7da2a7adadf21214938bb79c83ea12b473a4b6ee4ad4bf854e7715e13d1f", size = 114565, upload-time = "2023-12-01T16:22:51.518Z" }, +] + +[[package]] +name = "ollama" +version = "0.4.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/4d/f46ff3d124ce0805cf728a443cfb0227beb025256cb9276a6f71521c19bd/ollama-0.4.9.tar.gz", hash = "sha256:5266d4d29b5089a01489872b8e8f980f018bccbdd1082b3903448af1d5615ce7", size = 40875, upload-time = "2025-05-27T18:09:27.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/b6/0c40493c0505652d3da58ad048be19f00c4bdf587140cc578a770d2029d4/ollama-0.4.9-py3-none-any.whl", hash = "sha256:18c8c85358c54d7f73d6a66cda495b0e3ba99fdb88f824ae470d740fbb211a50", size = 13303, upload-time = "2025-05-27T18:09:26.147Z" }, +] + +[[package]] +name = "onnxruntime" +version = "1.23.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coloredlogs" }, + { name = "flatbuffers" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "protobuf" }, + { name = "sympy" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/be/467b00f09061572f022ffd17e49e49e5a7a789056bad95b54dfd3bee73ff/onnxruntime-1.23.2-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:6f91d2c9b0965e86827a5ba01531d5b669770b01775b23199565d6c1f136616c", size = 17196113, upload-time = "2025-10-22T03:47:33.526Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a8/3c23a8f75f93122d2b3410bfb74d06d0f8da4ac663185f91866b03f7da1b/onnxruntime-1.23.2-cp311-cp311-macosx_13_0_x86_64.whl", hash = 
"sha256:87d8b6eaf0fbeb6835a60a4265fde7a3b60157cf1b2764773ac47237b4d48612", size = 19153857, upload-time = "2025-10-22T03:46:37.578Z" }, + { url = "https://files.pythonhosted.org/packages/3f/d8/506eed9af03d86f8db4880a4c47cd0dffee973ef7e4f4cff9f1d4bcf7d22/onnxruntime-1.23.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbfd2fca76c855317568c1b36a885ddea2272c13cb0e395002c402f2360429a6", size = 15220095, upload-time = "2025-10-22T03:46:24.769Z" }, + { url = "https://files.pythonhosted.org/packages/e9/80/113381ba832d5e777accedc6cb41d10f9eca82321ae31ebb6bcede530cea/onnxruntime-1.23.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da44b99206e77734c5819aa2142c69e64f3b46edc3bd314f6a45a932defc0b3e", size = 17372080, upload-time = "2025-10-22T03:47:00.265Z" }, + { url = "https://files.pythonhosted.org/packages/3a/db/1b4a62e23183a0c3fe441782462c0ede9a2a65c6bbffb9582fab7c7a0d38/onnxruntime-1.23.2-cp311-cp311-win_amd64.whl", hash = "sha256:902c756d8b633ce0dedd889b7c08459433fbcf35e9c38d1c03ddc020f0648c6e", size = 13468349, upload-time = "2025-10-22T03:47:25.783Z" }, + { url = "https://files.pythonhosted.org/packages/1b/9e/f748cd64161213adeef83d0cb16cb8ace1e62fa501033acdd9f9341fff57/onnxruntime-1.23.2-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:b8f029a6b98d3cf5be564d52802bb50a8489ab73409fa9db0bf583eabb7c2321", size = 17195929, upload-time = "2025-10-22T03:47:36.24Z" }, + { url = "https://files.pythonhosted.org/packages/91/9d/a81aafd899b900101988ead7fb14974c8a58695338ab6a0f3d6b0100f30b/onnxruntime-1.23.2-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:218295a8acae83905f6f1aed8cacb8e3eb3bd7513a13fe4ba3b2664a19fc4a6b", size = 19157705, upload-time = "2025-10-22T03:46:40.415Z" }, + { url = "https://files.pythonhosted.org/packages/3c/35/4e40f2fba272a6698d62be2cd21ddc3675edfc1a4b9ddefcc4648f115315/onnxruntime-1.23.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:76ff670550dc23e58ea9bc53b5149b99a44e63b34b524f7b8547469aaa0dcb8c", size = 15226915, upload-time = "2025-10-22T03:46:27.773Z" }, + { url = "https://files.pythonhosted.org/packages/ef/88/9cc25d2bafe6bc0d4d3c1db3ade98196d5b355c0b273e6a5dc09c5d5d0d5/onnxruntime-1.23.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f9b4ae77f8e3c9bee50c27bc1beede83f786fe1d52e99ac85aa8d65a01e9b77", size = 17382649, upload-time = "2025-10-22T03:47:02.782Z" }, + { url = "https://files.pythonhosted.org/packages/c0/b4/569d298f9fc4d286c11c45e85d9ffa9e877af12ace98af8cab52396e8f46/onnxruntime-1.23.2-cp312-cp312-win_amd64.whl", hash = "sha256:25de5214923ce941a3523739d34a520aac30f21e631de53bba9174dc9c004435", size = 13470528, upload-time = "2025-10-22T03:47:28.106Z" }, + { url = "https://files.pythonhosted.org/packages/3d/41/fba0cabccecefe4a1b5fc8020c44febb334637f133acefc7ec492029dd2c/onnxruntime-1.23.2-cp313-cp313-macosx_13_0_arm64.whl", hash = "sha256:2ff531ad8496281b4297f32b83b01cdd719617e2351ffe0dba5684fb283afa1f", size = 17196337, upload-time = "2025-10-22T03:46:35.168Z" }, + { url = "https://files.pythonhosted.org/packages/fe/f9/2d49ca491c6a986acce9f1d1d5fc2099108958cc1710c28e89a032c9cfe9/onnxruntime-1.23.2-cp313-cp313-macosx_13_0_x86_64.whl", hash = "sha256:162f4ca894ec3de1a6fd53589e511e06ecdc3ff646849b62a9da7489dee9ce95", size = 19157691, upload-time = "2025-10-22T03:46:43.518Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/a1/428ee29c6eaf09a6f6be56f836213f104618fb35ac6cc586ff0f477263eb/onnxruntime-1.23.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:45d127d6e1e9b99d1ebeae9bcd8f98617a812f53f46699eafeb976275744826b", size = 15226898, upload-time = "2025-10-22T03:46:30.039Z" }, + { url = "https://files.pythonhosted.org/packages/f2/2b/b57c8a2466a3126dbe0a792f56ad7290949b02f47b86216cd47d857e4b77/onnxruntime-1.23.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8bace4e0d46480fbeeb7bbe1ffe1f080e6663a42d1086ff95c1551f2d39e7872", size = 17382518, upload-time = "2025-10-22T03:47:05.407Z" }, + { url = "https://files.pythonhosted.org/packages/4a/93/aba75358133b3a941d736816dd392f687e7eab77215a6e429879080b76b6/onnxruntime-1.23.2-cp313-cp313-win_amd64.whl", hash = "sha256:1f9cc0a55349c584f083c1c076e611a7c35d5b867d5d6e6d6c823bf821978088", size = 13470276, upload-time = "2025-10-22T03:47:31.193Z" }, + { url = "https://files.pythonhosted.org/packages/7c/3d/6830fa61c69ca8e905f237001dbfc01689a4e4ab06147020a4518318881f/onnxruntime-1.23.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9d2385e774f46ac38f02b3a91a91e30263d41b2f1f4f26ae34805b2a9ddef466", size = 15229610, upload-time = "2025-10-22T03:46:32.239Z" }, + { url = "https://files.pythonhosted.org/packages/b6/ca/862b1e7a639460f0ca25fd5b6135fb42cf9deea86d398a92e44dfda2279d/onnxruntime-1.23.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2b9233c4947907fd1818d0e581c049c41ccc39b2856cc942ff6d26317cee145", size = 17394184, upload-time = "2025-10-22T03:47:08.127Z" }, +] + +[[package]] +name = "openai" +version = "1.109.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/a1/a303104dc55fc546a3f6914c842d3da471c64eec92043aef8f652eb6c524/openai-1.109.1.tar.gz", hash = "sha256:d173ed8dbca665892a6db099b4a2dfac624f94d20a93f46eb0b56aae940ed869", size = 564133, upload-time = "2025-09-24T13:00:53.075Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/2a/7dd3d207ec669cacc1f186fd856a0f61dbc255d24f6fdc1a6715d6051b0f/openai-1.109.1-py3-none-any.whl", hash = "sha256:6bcaf57086cf59159b8e27447e4e7dd019db5d29a438072fbd49c290c7e65315", size = 948627, upload-time = "2025-09-24T13:00:50.754Z" }, +] + +[[package]] +name = "opencv-python" +version = "4.11.0.86" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/17/06/68c27a523103dad5837dc5b87e71285280c4f098c60e4fe8a8db6486ab09/opencv-python-4.11.0.86.tar.gz", hash = "sha256:03d60ccae62304860d232272e4a4fda93c39d595780cb40b161b310244b736a4", size = 95171956, upload-time = "2025-01-16T13:52:24.737Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/4d/53b30a2a3ac1f75f65a59eb29cf2ee7207ce64867db47036ad61743d5a23/opencv_python-4.11.0.86-cp37-abi3-macosx_13_0_arm64.whl", hash = "sha256:432f67c223f1dc2824f5e73cdfcd9db0efc8710647d4e813012195dc9122a52a", size = 37326322, upload-time = "2025-01-16T13:52:25.887Z" }, + { url = "https://files.pythonhosted.org/packages/3b/84/0a67490741867eacdfa37bc18df96e08a9d579583b419010d7f3da8ff503/opencv_python-4.11.0.86-cp37-abi3-macosx_13_0_x86_64.whl", 
hash = "sha256:9d05ef13d23fe97f575153558653e2d6e87103995d54e6a35db3f282fe1f9c66", size = 56723197, upload-time = "2025-01-16T13:55:21.222Z" }, + { url = "https://files.pythonhosted.org/packages/f3/bd/29c126788da65c1fb2b5fb621b7fed0ed5f9122aa22a0868c5e2c15c6d23/opencv_python-4.11.0.86-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b92ae2c8852208817e6776ba1ea0d6b1e0a1b5431e971a2a0ddd2a8cc398202", size = 42230439, upload-time = "2025-01-16T13:51:35.822Z" }, + { url = "https://files.pythonhosted.org/packages/2c/8b/90eb44a40476fa0e71e05a0283947cfd74a5d36121a11d926ad6f3193cc4/opencv_python-4.11.0.86-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b02611523803495003bd87362db3e1d2a0454a6a63025dc6658a9830570aa0d", size = 62986597, upload-time = "2025-01-16T13:52:08.836Z" }, + { url = "https://files.pythonhosted.org/packages/fb/d7/1d5941a9dde095468b288d989ff6539dd69cd429dbf1b9e839013d21b6f0/opencv_python-4.11.0.86-cp37-abi3-win32.whl", hash = "sha256:810549cb2a4aedaa84ad9a1c92fbfdfc14090e2749cedf2c1589ad8359aa169b", size = 29384337, upload-time = "2025-01-16T13:52:13.549Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/f1c30a92854540bf789e9cd5dde7ef49bbe63f855b85a2e6b3db8135c591/opencv_python-4.11.0.86-cp37-abi3-win_amd64.whl", hash = "sha256:085ad9b77c18853ea66283e98affefe2de8cc4c1f43eda4c100cf9b2721142ec", size = 39488044, upload-time = "2025-01-16T13:52:21.928Z" }, +] + +[[package]] +name = "opentelemetry-api" +version = "1.28.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "deprecated" }, + { name = "importlib-metadata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/51/34/e4e9245c868c6490a46ffedf6bd5b0f512bbc0a848b19e3a51f6bbad648c/opentelemetry_api-1.28.2.tar.gz", hash = "sha256:ecdc70c7139f17f9b0cf3742d57d7020e3e8315d6cffcdf1a12a905d45b19cc0", size = 62796, upload-time = "2024-11-18T18:29:42.747Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/58/b17393cdfc149e14ee84c662abf921993dcce8058628359ef1f49e2abb97/opentelemetry_api-1.28.2-py3-none-any.whl", hash = "sha256:6fcec89e265beb258fe6b1acaaa3c8c705a934bd977b9f534a2b7c0d2d4275a6", size = 64302, upload-time = "2024-11-18T18:29:16.783Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp" +version = "1.28.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-exporter-otlp-proto-grpc" }, + { name = "opentelemetry-exporter-otlp-proto-http" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8a/eb/ad88c61b4e51cdd294ad4ae7c45b35120fb381eb019675954c4fc15b6c4c/opentelemetry_exporter_otlp-1.28.2.tar.gz", hash = "sha256:45f8d7fe4cdd41526464b542ce91b1fd1ae661be92d2c6cba71a3d948b2bdf70", size = 6155, upload-time = "2024-11-18T18:29:45.549Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/16/65b0f0f9a85e6c0e1ce30e0ea96e0174ca4db85301883d1d6a9702700946/opentelemetry_exporter_otlp-1.28.2-py3-none-any.whl", hash = "sha256:b50f6d4a80e6bcd329e36f360ac486ecfa106ea704d6226ceea05d3a48455f70", size = 7010, upload-time = "2024-11-18T18:29:21.195Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.28.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-proto" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/cd/cd990f891b64e7698b8a6b6ab90dfac7f957db5a3d06788acd52f73ad4c0/opentelemetry_exporter_otlp_proto_common-1.28.2.tar.gz", hash = 
"sha256:7aebaa5fc9ff6029374546df1f3a62616fda07fccd9c6a8b7892ec130dd8baca", size = 19136, upload-time = "2024-11-18T18:29:46.87Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/4d/769f3b1b1c6af5e603da50349ba31af757897540a75d666de22d39461055/opentelemetry_exporter_otlp_proto_common-1.28.2-py3-none-any.whl", hash = "sha256:545b1943b574f666c35b3d6cc67cb0b111060727e93a1e2866e346b33bff2a12", size = 18460, upload-time = "2024-11-18T18:29:22.79Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.28.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "deprecated" }, + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/4c/b5374467e97f2b290611de746d0e6cab3a07aec865d6b99d11535cd60059/opentelemetry_exporter_otlp_proto_grpc-1.28.2.tar.gz", hash = "sha256:07c10378380bbb01a7f621a5ce833fc1fab816e971140cd3ea1cd587840bc0e6", size = 26227, upload-time = "2024-11-18T18:29:47.576Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dd/7e/6af5a7de87988cfc951db86f7fd0ecaabc20bc112fd9cfe06b8a01f11400/opentelemetry_exporter_otlp_proto_grpc-1.28.2-py3-none-any.whl", hash = "sha256:6083d9300863aab35bfce7c172d5fc1007686e6f8dff366eae460cd9a21592e2", size = 18518, upload-time = "2024-11-18T18:29:23.71Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.28.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "deprecated" }, + { name = "googleapis-common-protos" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/91/4e32e52d13dbdf9560bc095dfe66a2c09e0034a886f7725fcda8fe10a052/opentelemetry_exporter_otlp_proto_http-1.28.2.tar.gz", hash = "sha256:d9b353d67217f091aaf4cfe8693c170973bb3e90a558992570d97020618fda79", size = 15043, upload-time = "2024-11-18T18:29:48.237Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/19/23/802b889cf8bf3e235f30fbcbaa2b3fd484fe8c76b5b4db00f00c0e9af20f/opentelemetry_exporter_otlp_proto_http-1.28.2-py3-none-any.whl", hash = "sha256:af921c18212a56ef4be68458ba475791c0517ebfd8a2ff04669c9cd477d90ff2", size = 17218, upload-time = "2024-11-18T18:29:25.474Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.49b2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "packaging" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/1f/9fa51f6f64f4d179f4e3370eb042176ff7717682428552f5e1f4c5efcc09/opentelemetry_instrumentation-0.49b2.tar.gz", hash = "sha256:8cf00cc8d9d479e4b72adb9bd267ec544308c602b7188598db5a687e77b298e2", size = 26480, upload-time = "2024-11-18T18:39:46.03Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/e3/ad23372525653b0221212d5e2a71bd97aae64cc35f90cbf0c70de57dfa4e/opentelemetry_instrumentation-0.49b2-py3-none-any.whl", hash = "sha256:f6d782b0ef9fef4a4c745298651c65f5c532c34cd4c40d230ab5b9f3b3b4d151", size = 30693, upload-time = "2024-11-18T18:38:31.962Z" }, +] + +[[package]] +name = 
"opentelemetry-proto" +version = "1.28.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d0/45/96c4f34c79fd87dc8a1c0c432f23a5a202729f21e4e63c8b36fc8e57767a/opentelemetry_proto-1.28.2.tar.gz", hash = "sha256:7c0d125a6b71af88bfeeda16bfdd0ff63dc2cf0039baf6f49fa133b203e3f566", size = 34316, upload-time = "2024-11-18T18:29:57.324Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/12/646f48d6d698a6df0437a22b591387440dc4888c8752d1a1300f730da710/opentelemetry_proto-1.28.2-py3-none-any.whl", hash = "sha256:0837498f59db55086462915e5898d0b1a18c1392f6db4d7e937143072a72370c", size = 55818, upload-time = "2024-11-18T18:29:37.002Z" }, +] + +[[package]] +name = "opentelemetry-sdk" +version = "1.28.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/f4/840a5af4efe48d7fb4c456ad60fd624673e871a60d6494f7ff8a934755d4/opentelemetry_sdk-1.28.2.tar.gz", hash = "sha256:5fed24c5497e10df30282456fe2910f83377797511de07d14cec0d3e0a1a3110", size = 157272, upload-time = "2024-11-18T18:29:58.094Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/8b/4f2b418496c08016d4384f9b1c4725a8af7faafa248d624be4bb95993ce1/opentelemetry_sdk-1.28.2-py3-none-any.whl", hash = "sha256:93336c129556f1e3ccd21442b94d3521759541521861b2214c499571b85cb71b", size = 118757, upload-time = "2024-11-18T18:29:38.744Z" }, +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.49b2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "deprecated" }, + { name = "opentelemetry-api" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/0a/e3b93f94aa3223c6fd8e743502a1fefd4fb3a753d8f501ce2a418f7c0bd4/opentelemetry_semantic_conventions-0.49b2.tar.gz", hash = "sha256:44e32ce6a5bb8d7c0c617f84b9dc1c8deda1045a07dc16a688cc7cbeab679997", size = 95213, upload-time = "2024-11-18T18:29:58.915Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/be/6661c8f76708bb3ba38c90be8fa8d7ffe17ccbc5cbbc229334f5535f6448/opentelemetry_semantic_conventions-0.49b2-py3-none-any.whl", hash = "sha256:51e7e1d0daa958782b6c2a8ed05e5f0e7dd0716fc327ac058777b8659649ee54", size = 159199, upload-time = "2024-11-18T18:29:39.906Z" }, +] + +[[package]] +name = "orjson" +version = "3.11.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/fe/ed708782d6709cc60eb4c2d8a361a440661f74134675c72990f2c48c785f/orjson-3.11.4.tar.gz", hash = "sha256:39485f4ab4c9b30a3943cfe99e1a213c4776fb69e8abd68f66b83d5a0b0fdc6d", size = 5945188, upload-time = "2025-10-24T15:50:38.027Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/63/1d/1ea6005fffb56715fd48f632611e163d1604e8316a5bad2288bee9a1c9eb/orjson-3.11.4-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5e59d23cd93ada23ec59a96f215139753fbfe3a4d989549bcb390f8c00370b39", size = 243498, upload-time = "2025-10-24T15:48:48.101Z" }, + { url = "https://files.pythonhosted.org/packages/37/d7/ffed10c7da677f2a9da307d491b9eb1d0125b0307019c4ad3d665fd31f4f/orjson-3.11.4-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5c3aedecfc1beb988c27c79d52ebefab93b6c3921dbec361167e6559aba2d36d", size = 128961, upload-time = "2025-10-24T15:48:49.571Z" 
}, + { url = "https://files.pythonhosted.org/packages/a2/96/3e4d10a18866d1368f73c8c44b7fe37cc8a15c32f2a7620be3877d4c55a3/orjson-3.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da9e5301f1c2caa2a9a4a303480d79c9ad73560b2e7761de742ab39fe59d9175", size = 130321, upload-time = "2025-10-24T15:48:50.713Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1f/465f66e93f434f968dd74d5b623eb62c657bdba2332f5a8be9f118bb74c7/orjson-3.11.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8873812c164a90a79f65368f8f96817e59e35d0cc02786a5356f0e2abed78040", size = 129207, upload-time = "2025-10-24T15:48:52.193Z" }, + { url = "https://files.pythonhosted.org/packages/28/43/d1e94837543321c119dff277ae8e348562fe8c0fafbb648ef7cb0c67e521/orjson-3.11.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d7feb0741ebb15204e748f26c9638e6665a5fa93c37a2c73d64f1669b0ddc63", size = 136323, upload-time = "2025-10-24T15:48:54.806Z" }, + { url = "https://files.pythonhosted.org/packages/bf/04/93303776c8890e422a5847dd012b4853cdd88206b8bbd3edc292c90102d1/orjson-3.11.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ee5487fefee21e6910da4c2ee9eef005bee568a0879834df86f888d2ffbdd9", size = 137440, upload-time = "2025-10-24T15:48:56.326Z" }, + { url = "https://files.pythonhosted.org/packages/1e/ef/75519d039e5ae6b0f34d0336854d55544ba903e21bf56c83adc51cd8bf82/orjson-3.11.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d40d46f348c0321df01507f92b95a377240c4ec31985225a6668f10e2676f9a", size = 136680, upload-time = "2025-10-24T15:48:57.476Z" }, + { url = "https://files.pythonhosted.org/packages/b5/18/bf8581eaae0b941b44efe14fee7b7862c3382fbc9a0842132cfc7cf5ecf4/orjson-3.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95713e5fc8af84d8edc75b785d2386f653b63d62b16d681687746734b4dfc0be", size = 136160, upload-time = "2025-10-24T15:48:59.631Z" }, + { url = "https://files.pythonhosted.org/packages/c4/35/a6d582766d351f87fc0a22ad740a641b0a8e6fc47515e8614d2e4790ae10/orjson-3.11.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad73ede24f9083614d6c4ca9a85fe70e33be7bf047ec586ee2363bc7418fe4d7", size = 140318, upload-time = "2025-10-24T15:49:00.834Z" }, + { url = "https://files.pythonhosted.org/packages/76/b3/5a4801803ab2e2e2d703bce1a56540d9f99a9143fbec7bf63d225044fef8/orjson-3.11.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:842289889de515421f3f224ef9c1f1efb199a32d76d8d2ca2706fa8afe749549", size = 406330, upload-time = "2025-10-24T15:49:02.327Z" }, + { url = "https://files.pythonhosted.org/packages/80/55/a8f682f64833e3a649f620eafefee175cbfeb9854fc5b710b90c3bca45df/orjson-3.11.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3b2427ed5791619851c52a1261b45c233930977e7de8cf36de05636c708fa905", size = 149580, upload-time = "2025-10-24T15:49:03.517Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e4/c132fa0c67afbb3eb88274fa98df9ac1f631a675e7877037c611805a4413/orjson-3.11.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c36e524af1d29982e9b190573677ea02781456b2e537d5840e4538a5ec41907", size = 139846, upload-time = "2025-10-24T15:49:04.761Z" }, + { url = "https://files.pythonhosted.org/packages/54/06/dc3491489efd651fef99c5908e13951abd1aead1257c67f16135f95ce209/orjson-3.11.4-cp311-cp311-win32.whl", hash = "sha256:87255b88756eab4a68ec61837ca754e5d10fa8bc47dc57f75cedfeaec358d54c", size = 135781, upload-time = "2025-10-24T15:49:05.969Z" }, + { 
url = "https://files.pythonhosted.org/packages/79/b7/5e5e8d77bd4ea02a6ac54c42c818afb01dd31961be8a574eb79f1d2cfb1e/orjson-3.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:e2d5d5d798aba9a0e1fede8d853fa899ce2cb930ec0857365f700dffc2c7af6a", size = 131391, upload-time = "2025-10-24T15:49:07.355Z" }, + { url = "https://files.pythonhosted.org/packages/0f/dc/9484127cc1aa213be398ed735f5f270eedcb0c0977303a6f6ddc46b60204/orjson-3.11.4-cp311-cp311-win_arm64.whl", hash = "sha256:6bb6bb41b14c95d4f2702bce9975fda4516f1db48e500102fc4d8119032ff045", size = 126252, upload-time = "2025-10-24T15:49:08.869Z" }, + { url = "https://files.pythonhosted.org/packages/63/51/6b556192a04595b93e277a9ff71cd0cc06c21a7df98bcce5963fa0f5e36f/orjson-3.11.4-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d4371de39319d05d3f482f372720b841c841b52f5385bd99c61ed69d55d9ab50", size = 243571, upload-time = "2025-10-24T15:49:10.008Z" }, + { url = "https://files.pythonhosted.org/packages/1c/2c/2602392ddf2601d538ff11848b98621cd465d1a1ceb9db9e8043181f2f7b/orjson-3.11.4-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:e41fd3b3cac850eaae78232f37325ed7d7436e11c471246b87b2cd294ec94853", size = 128891, upload-time = "2025-10-24T15:49:11.297Z" }, + { url = "https://files.pythonhosted.org/packages/4e/47/bf85dcf95f7a3a12bf223394a4f849430acd82633848d52def09fa3f46ad/orjson-3.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:600e0e9ca042878c7fdf189cf1b028fe2c1418cc9195f6cb9824eb6ed99cb938", size = 130137, upload-time = "2025-10-24T15:49:12.544Z" }, + { url = "https://files.pythonhosted.org/packages/b4/4d/a0cb31007f3ab6f1fd2a1b17057c7c349bc2baf8921a85c0180cc7be8011/orjson-3.11.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7bbf9b333f1568ef5da42bc96e18bf30fd7f8d54e9ae066d711056add508e415", size = 129152, upload-time = "2025-10-24T15:49:13.754Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ef/2811def7ce3d8576b19e3929fff8f8f0d44bc5eb2e0fdecb2e6e6cc6c720/orjson-3.11.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806363144bb6e7297b8e95870e78d30a649fdc4e23fc84daa80c8ebd366ce44", size = 136834, upload-time = "2025-10-24T15:49:15.307Z" }, + { url = "https://files.pythonhosted.org/packages/00/d4/9aee9e54f1809cec8ed5abd9bc31e8a9631d19460e3b8470145d25140106/orjson-3.11.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad355e8308493f527d41154e9053b86a5be892b3b359a5c6d5d95cda23601cb2", size = 137519, upload-time = "2025-10-24T15:49:16.557Z" }, + { url = "https://files.pythonhosted.org/packages/db/ea/67bfdb5465d5679e8ae8d68c11753aaf4f47e3e7264bad66dc2f2249e643/orjson-3.11.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a7517482667fb9f0ff1b2f16fe5829296ed7a655d04d68cd9711a4d8a4e708", size = 136749, upload-time = "2025-10-24T15:49:17.796Z" }, + { url = "https://files.pythonhosted.org/packages/01/7e/62517dddcfce6d53a39543cd74d0dccfcbdf53967017c58af68822100272/orjson-3.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97eb5942c7395a171cbfecc4ef6701fc3c403e762194683772df4c54cfbb2210", size = 136325, upload-time = "2025-10-24T15:49:19.347Z" }, + { url = "https://files.pythonhosted.org/packages/18/ae/40516739f99ab4c7ec3aaa5cc242d341fcb03a45d89edeeaabc5f69cb2cf/orjson-3.11.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:149d95d5e018bdd822e3f38c103b1a7c91f88d38a88aada5c4e9b3a73a244241", size = 140204, upload-time = 
"2025-10-24T15:49:20.545Z" }, + { url = "https://files.pythonhosted.org/packages/82/18/ff5734365623a8916e3a4037fcef1cd1782bfc14cf0992afe7940c5320bf/orjson-3.11.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:624f3951181eb46fc47dea3d221554e98784c823e7069edb5dbd0dc826ac909b", size = 406242, upload-time = "2025-10-24T15:49:21.884Z" }, + { url = "https://files.pythonhosted.org/packages/e1/43/96436041f0a0c8c8deca6a05ebeaf529bf1de04839f93ac5e7c479807aec/orjson-3.11.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:03bfa548cf35e3f8b3a96c4e8e41f753c686ff3d8e182ce275b1751deddab58c", size = 150013, upload-time = "2025-10-24T15:49:23.185Z" }, + { url = "https://files.pythonhosted.org/packages/1b/48/78302d98423ed8780479a1e682b9aecb869e8404545d999d34fa486e573e/orjson-3.11.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:525021896afef44a68148f6ed8a8bf8375553d6066c7f48537657f64823565b9", size = 139951, upload-time = "2025-10-24T15:49:24.428Z" }, + { url = "https://files.pythonhosted.org/packages/4a/7b/ad613fdcdaa812f075ec0875143c3d37f8654457d2af17703905425981bf/orjson-3.11.4-cp312-cp312-win32.whl", hash = "sha256:b58430396687ce0f7d9eeb3dd47761ca7d8fda8e9eb92b3077a7a353a75efefa", size = 136049, upload-time = "2025-10-24T15:49:25.973Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3c/9cf47c3ff5f39b8350fb21ba65d789b6a1129d4cbb3033ba36c8a9023520/orjson-3.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:c6dbf422894e1e3c80a177133c0dda260f81428f9de16d61041949f6a2e5c140", size = 131461, upload-time = "2025-10-24T15:49:27.259Z" }, + { url = "https://files.pythonhosted.org/packages/c6/3b/e2425f61e5825dc5b08c2a5a2b3af387eaaca22a12b9c8c01504f8614c36/orjson-3.11.4-cp312-cp312-win_arm64.whl", hash = "sha256:d38d2bc06d6415852224fcc9c0bfa834c25431e466dc319f0edd56cca81aa96e", size = 126167, upload-time = "2025-10-24T15:49:28.511Z" }, + { url = "https://files.pythonhosted.org/packages/23/15/c52aa7112006b0f3d6180386c3a46ae057f932ab3425bc6f6ac50431cca1/orjson-3.11.4-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:2d6737d0e616a6e053c8b4acc9eccea6b6cce078533666f32d140e4f85002534", size = 243525, upload-time = "2025-10-24T15:49:29.737Z" }, + { url = "https://files.pythonhosted.org/packages/ec/38/05340734c33b933fd114f161f25a04e651b0c7c33ab95e9416ade5cb44b8/orjson-3.11.4-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:afb14052690aa328cc118a8e09f07c651d301a72e44920b887c519b313d892ff", size = 128871, upload-time = "2025-10-24T15:49:31.109Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/ae8d34899ff0c012039b5a7cb96a389b2476e917733294e498586b45472d/orjson-3.11.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38aa9e65c591febb1b0aed8da4d469eba239d434c218562df179885c94e1a3ad", size = 130055, upload-time = "2025-10-24T15:49:33.382Z" }, + { url = "https://files.pythonhosted.org/packages/33/aa/6346dd5073730451bee3681d901e3c337e7ec17342fb79659ec9794fc023/orjson-3.11.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f2cf4dfaf9163b0728d061bebc1e08631875c51cd30bf47cb9e3293bfbd7dcd5", size = 129061, upload-time = "2025-10-24T15:49:34.935Z" }, + { url = "https://files.pythonhosted.org/packages/39/e4/8eea51598f66a6c853c380979912d17ec510e8e66b280d968602e680b942/orjson-3.11.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89216ff3dfdde0e4070932e126320a1752c9d9a758d6a32ec54b3b9334991a6a", size = 136541, upload-time = "2025-10-24T15:49:36.923Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/47/cb8c654fa9adcc60e99580e17c32b9e633290e6239a99efa6b885aba9dbc/orjson-3.11.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9daa26ca8e97fae0ce8aa5d80606ef8f7914e9b129b6b5df9104266f764ce436", size = 137535, upload-time = "2025-10-24T15:49:38.307Z" }, + { url = "https://files.pythonhosted.org/packages/43/92/04b8cc5c2b729f3437ee013ce14a60ab3d3001465d95c184758f19362f23/orjson-3.11.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c8b2769dc31883c44a9cd126560327767f848eb95f99c36c9932f51090bfce9", size = 136703, upload-time = "2025-10-24T15:49:40.795Z" }, + { url = "https://files.pythonhosted.org/packages/aa/fd/d0733fcb9086b8be4ebcfcda2d0312865d17d0d9884378b7cffb29d0763f/orjson-3.11.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1469d254b9884f984026bd9b0fa5bbab477a4bfe558bba6848086f6d43eb5e73", size = 136293, upload-time = "2025-10-24T15:49:42.347Z" }, + { url = "https://files.pythonhosted.org/packages/c2/d7/3c5514e806837c210492d72ae30ccf050ce3f940f45bf085bab272699ef4/orjson-3.11.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:68e44722541983614e37117209a194e8c3ad07838ccb3127d96863c95ec7f1e0", size = 140131, upload-time = "2025-10-24T15:49:43.638Z" }, + { url = "https://files.pythonhosted.org/packages/9c/dd/ba9d32a53207babf65bd510ac4d0faaa818bd0df9a9c6f472fe7c254f2e3/orjson-3.11.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:8e7805fda9672c12be2f22ae124dcd7b03928d6c197544fe12174b86553f3196", size = 406164, upload-time = "2025-10-24T15:49:45.498Z" }, + { url = "https://files.pythonhosted.org/packages/8e/f9/f68ad68f4af7c7bde57cd514eaa2c785e500477a8bc8f834838eb696a685/orjson-3.11.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:04b69c14615fb4434ab867bf6f38b2d649f6f300af30a6705397e895f7aec67a", size = 149859, upload-time = "2025-10-24T15:49:46.981Z" }, + { url = "https://files.pythonhosted.org/packages/b6/d2/7f847761d0c26818395b3d6b21fb6bc2305d94612a35b0a30eae65a22728/orjson-3.11.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:639c3735b8ae7f970066930e58cf0ed39a852d417c24acd4a25fc0b3da3c39a6", size = 139926, upload-time = "2025-10-24T15:49:48.321Z" }, + { url = "https://files.pythonhosted.org/packages/9f/37/acd14b12dc62db9a0e1d12386271b8661faae270b22492580d5258808975/orjson-3.11.4-cp313-cp313-win32.whl", hash = "sha256:6c13879c0d2964335491463302a6ca5ad98105fc5db3565499dcb80b1b4bd839", size = 136007, upload-time = "2025-10-24T15:49:49.938Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a9/967be009ddf0a1fffd7a67de9c36656b28c763659ef91352acc02cbe364c/orjson-3.11.4-cp313-cp313-win_amd64.whl", hash = "sha256:09bf242a4af98732db9f9a1ec57ca2604848e16f132e3f72edfd3c5c96de009a", size = 131314, upload-time = "2025-10-24T15:49:51.248Z" }, + { url = "https://files.pythonhosted.org/packages/cb/db/399abd6950fbd94ce125cb8cd1a968def95174792e127b0642781e040ed4/orjson-3.11.4-cp313-cp313-win_arm64.whl", hash = "sha256:a85f0adf63319d6c1ba06fb0dbf997fced64a01179cf17939a6caca662bf92de", size = 126152, upload-time = "2025-10-24T15:49:52.922Z" }, + { url = "https://files.pythonhosted.org/packages/25/e3/54ff63c093cc1697e758e4fceb53164dd2661a7d1bcd522260ba09f54533/orjson-3.11.4-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:42d43a1f552be1a112af0b21c10a5f553983c2a0938d2bbb8ecd8bc9fb572803", size = 243501, upload-time = "2025-10-24T15:49:54.288Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/7d/e2d1076ed2e8e0ae9badca65bf7ef22710f93887b29eaa37f09850604e09/orjson-3.11.4-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:26a20f3fbc6c7ff2cb8e89c4c5897762c9d88cf37330c6a117312365d6781d54", size = 128862, upload-time = "2025-10-24T15:49:55.961Z" }, + { url = "https://files.pythonhosted.org/packages/9f/37/ca2eb40b90621faddfa9517dfe96e25f5ae4d8057a7c0cdd613c17e07b2c/orjson-3.11.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e3f20be9048941c7ffa8fc523ccbd17f82e24df1549d1d1fe9317712d19938e", size = 130047, upload-time = "2025-10-24T15:49:57.406Z" }, + { url = "https://files.pythonhosted.org/packages/c7/62/1021ed35a1f2bad9040f05fa4cc4f9893410df0ba3eaa323ccf899b1c90a/orjson-3.11.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aac364c758dc87a52e68e349924d7e4ded348dedff553889e4d9f22f74785316", size = 129073, upload-time = "2025-10-24T15:49:58.782Z" }, + { url = "https://files.pythonhosted.org/packages/e8/3f/f84d966ec2a6fd5f73b1a707e7cd876813422ae4bf9f0145c55c9c6a0f57/orjson-3.11.4-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5c54a6d76e3d741dcc3f2707f8eeb9ba2a791d3adbf18f900219b62942803b1", size = 136597, upload-time = "2025-10-24T15:50:00.12Z" }, + { url = "https://files.pythonhosted.org/packages/32/78/4fa0aeca65ee82bbabb49e055bd03fa4edea33f7c080c5c7b9601661ef72/orjson-3.11.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f28485bdca8617b79d44627f5fb04336897041dfd9fa66d383a49d09d86798bc", size = 137515, upload-time = "2025-10-24T15:50:01.57Z" }, + { url = "https://files.pythonhosted.org/packages/c1/9d/0c102e26e7fde40c4c98470796d050a2ec1953897e2c8ab0cb95b0759fa2/orjson-3.11.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bfc2a484cad3585e4ba61985a6062a4c2ed5c7925db6d39f1fa267c9d166487f", size = 136703, upload-time = "2025-10-24T15:50:02.944Z" }, + { url = "https://files.pythonhosted.org/packages/df/ac/2de7188705b4cdfaf0b6c97d2f7849c17d2003232f6e70df98602173f788/orjson-3.11.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e34dbd508cb91c54f9c9788923daca129fe5b55c5b4eebe713bf5ed3791280cf", size = 136311, upload-time = "2025-10-24T15:50:04.441Z" }, + { url = "https://files.pythonhosted.org/packages/e0/52/847fcd1a98407154e944feeb12e3b4d487a0e264c40191fb44d1269cbaa1/orjson-3.11.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b13c478fa413d4b4ee606ec8e11c3b2e52683a640b006bb586b3041c2ca5f606", size = 140127, upload-time = "2025-10-24T15:50:07.398Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ae/21d208f58bdb847dd4d0d9407e2929862561841baa22bdab7aea10ca088e/orjson-3.11.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:724ca721ecc8a831b319dcd72cfa370cc380db0bf94537f08f7edd0a7d4e1780", size = 406201, upload-time = "2025-10-24T15:50:08.796Z" }, + { url = "https://files.pythonhosted.org/packages/8d/55/0789d6de386c8366059db098a628e2ad8798069e94409b0d8935934cbcb9/orjson-3.11.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:977c393f2e44845ce1b540e19a786e9643221b3323dae190668a98672d43fb23", size = 149872, upload-time = "2025-10-24T15:50:10.234Z" }, + { url = "https://files.pythonhosted.org/packages/cc/1d/7ff81ea23310e086c17b41d78a72270d9de04481e6113dbe2ac19118f7fb/orjson-3.11.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1e539e382cf46edec157ad66b0b0872a90d829a6b71f17cb633d6c160a223155", size = 139931, upload-time = "2025-10-24T15:50:11.623Z" }, + { url 
= "https://files.pythonhosted.org/packages/77/92/25b886252c50ed64be68c937b562b2f2333b45afe72d53d719e46a565a50/orjson-3.11.4-cp314-cp314-win32.whl", hash = "sha256:d63076d625babab9db5e7836118bdfa086e60f37d8a174194ae720161eb12394", size = 136065, upload-time = "2025-10-24T15:50:13.025Z" }, + { url = "https://files.pythonhosted.org/packages/63/b8/718eecf0bb7e9d64e4956afaafd23db9f04c776d445f59fe94f54bdae8f0/orjson-3.11.4-cp314-cp314-win_amd64.whl", hash = "sha256:0a54d6635fa3aaa438ae32e8570b9f0de36f3f6562c308d2a2a452e8b0592db1", size = 131310, upload-time = "2025-10-24T15:50:14.46Z" }, + { url = "https://files.pythonhosted.org/packages/1a/bf/def5e25d4d8bfce296a9a7c8248109bf58622c21618b590678f945a2c59c/orjson-3.11.4-cp314-cp314-win_arm64.whl", hash = "sha256:78b999999039db3cf58f6d230f524f04f75f129ba3d1ca2ed121f8657e575d3d", size = 126151, upload-time = "2025-10-24T15:50:15.878Z" }, +] + +[[package]] +name = "packaging" +version = "24.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950, upload-time = "2024-11-08T09:47:47.202Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451, upload-time = "2024-11-08T09:47:44.722Z" }, +] + +[[package]] +name = "pandas" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213, upload-time = "2024-09-20T13:10:04.827Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/44/d9502bf0ed197ba9bf1103c9867d5904ddcaf869e52329787fc54ed70cc8/pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039", size = 12602222, upload-time = "2024-09-20T13:08:56.254Z" }, + { url = "https://files.pythonhosted.org/packages/52/11/9eac327a38834f162b8250aab32a6781339c69afe7574368fffe46387edf/pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd", size = 11321274, upload-time = "2024-09-20T13:08:58.645Z" }, + { url = "https://files.pythonhosted.org/packages/45/fb/c4beeb084718598ba19aa9f5abbc8aed8b42f90930da861fcb1acdb54c3a/pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698", size = 15579836, upload-time = "2024-09-20T19:01:57.571Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5f/4dba1d39bb9c38d574a9a22548c540177f78ea47b32f99c0ff2ec499fac5/pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc", size = 13058505, upload-time = "2024-09-20T13:09:01.501Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/57/708135b90391995361636634df1f1130d03ba456e95bcf576fada459115a/pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3", size = 16744420, upload-time = "2024-09-20T19:02:00.678Z" }, + { url = "https://files.pythonhosted.org/packages/86/4a/03ed6b7ee323cf30404265c284cee9c65c56a212e0a08d9ee06984ba2240/pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32", size = 14440457, upload-time = "2024-09-20T13:09:04.105Z" }, + { url = "https://files.pythonhosted.org/packages/ed/8c/87ddf1fcb55d11f9f847e3c69bb1c6f8e46e2f40ab1a2d2abadb2401b007/pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5", size = 11617166, upload-time = "2024-09-20T13:09:06.917Z" }, + { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893, upload-time = "2024-09-20T13:09:09.655Z" }, + { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475, upload-time = "2024-09-20T13:09:14.718Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645, upload-time = "2024-09-20T19:02:03.88Z" }, + { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445, upload-time = "2024-09-20T13:09:17.621Z" }, + { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235, upload-time = "2024-09-20T19:02:07.094Z" }, + { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756, upload-time = "2024-09-20T13:09:20.474Z" }, + { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248, upload-time = "2024-09-20T13:09:23.137Z" }, + { url = "https://files.pythonhosted.org/packages/64/22/3b8f4e0ed70644e85cfdcd57454686b9057c6c38d2f74fe4b8bc2527214a/pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015", size = 12477643, upload-time = "2024-09-20T13:09:25.522Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/93/b3f5d1838500e22c8d793625da672f3eec046b1a99257666c94446969282/pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28", size = 11281573, upload-time = "2024-09-20T13:09:28.012Z" }, + { url = "https://files.pythonhosted.org/packages/f5/94/6c79b07f0e5aab1dcfa35a75f4817f5c4f677931d4234afcd75f0e6a66ca/pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0", size = 15196085, upload-time = "2024-09-20T19:02:10.451Z" }, + { url = "https://files.pythonhosted.org/packages/e8/31/aa8da88ca0eadbabd0a639788a6da13bb2ff6edbbb9f29aa786450a30a91/pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24", size = 12711809, upload-time = "2024-09-20T13:09:30.814Z" }, + { url = "https://files.pythonhosted.org/packages/ee/7c/c6dbdb0cb2a4344cacfb8de1c5808ca885b2e4dcfde8008266608f9372af/pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659", size = 16356316, upload-time = "2024-09-20T19:02:13.825Z" }, + { url = "https://files.pythonhosted.org/packages/57/b7/8b757e7d92023b832869fa8881a992696a0bfe2e26f72c9ae9f255988d42/pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb", size = 14022055, upload-time = "2024-09-20T13:09:33.462Z" }, + { url = "https://files.pythonhosted.org/packages/3b/bc/4b18e2b8c002572c5a441a64826252ce5da2aa738855747247a971988043/pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d", size = 11481175, upload-time = "2024-09-20T13:09:35.871Z" }, + { url = "https://files.pythonhosted.org/packages/76/a3/a5d88146815e972d40d19247b2c162e88213ef51c7c25993942c39dbf41d/pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468", size = 12615650, upload-time = "2024-09-20T13:09:38.685Z" }, + { url = "https://files.pythonhosted.org/packages/9c/8c/f0fd18f6140ddafc0c24122c8a964e48294acc579d47def376fef12bcb4a/pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18", size = 11290177, upload-time = "2024-09-20T13:09:41.141Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f9/e995754eab9c0f14c6777401f7eece0943840b7a9fc932221c19d1abee9f/pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2", size = 14651526, upload-time = "2024-09-20T19:02:16.905Z" }, + { url = "https://files.pythonhosted.org/packages/25/b0/98d6ae2e1abac4f35230aa756005e8654649d305df9a28b16b9ae4353bff/pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4", size = 11871013, upload-time = "2024-09-20T13:09:44.39Z" }, + { url = "https://files.pythonhosted.org/packages/cc/57/0f72a10f9db6a4628744c8e8f0df4e6e21de01212c7c981d31e50ffc8328/pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d", size = 15711620, upload-time = "2024-09-20T19:02:20.639Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436, upload-time = "2024-09-20T13:09:48.112Z" }, +] + +[[package]] +name = "pdfminer-six" +version = "20191110" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "chardet" }, + { name = "pycryptodome" }, + { name = "six" }, + { name = "sortedcontainers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e8/31/7acc148333749d6a8ef7cbf25902bdf59a462811a69d040a9a259916b6bd/pdfminer.six-20191110.tar.gz", hash = "sha256:141a53ec491bee6d45bf9b2c7f82601426fb5d32636bcf6b9c8a8f3b6431fea6", size = 10280313, upload-time = "2019-11-10T11:31:02.556Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/83/200b2723bcbf1d1248a8a7d16e6dd6cb970b5331397b11948428d7ebcf37/pdfminer.six-20191110-py2.py3-none-any.whl", hash = "sha256:ca2ca58f3ac66a486bce53a6ddba95dc2b27781612915fa41c444790ba9cd2a8", size = 5606096, upload-time = "2019-11-10T11:30:50.803Z" }, +] + +[[package]] +name = "pg8000" +version = "1.31.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dateutil" }, + { name = "scramp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c8/9a/077ab21e700051e03d8c5232b6bcb9a1a4d4b6242c9a0226df2cfa306414/pg8000-1.31.5.tar.gz", hash = "sha256:46ebb03be52b7a77c03c725c79da2ca281d6e8f59577ca66b17c9009618cae78", size = 118933, upload-time = "2025-09-14T09:16:49.748Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/07/5fd183858dff4d24840f07fc845f213cd371a19958558607ba22035dadd7/pg8000-1.31.5-py3-none-any.whl", hash = "sha256:0af2c1926b153307639868d2ee5cef6cd3a7d07448e12736989b10e1d491e201", size = 57816, upload-time = "2025-09-14T09:16:47.798Z" }, +] + +[[package]] +name = "pgvector" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/44/43/9a0fb552ab4fd980680c2037962e331820f67585df740bedc4a2b50faf20/pgvector-0.4.1.tar.gz", hash = "sha256:83d3a1c044ff0c2f1e95d13dfb625beb0b65506cfec0941bfe81fd0ad44f4003", size = 30646, upload-time = "2025-04-26T18:56:37.151Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/21/b5735d5982892c878ff3d01bb06e018c43fc204428361ee9fc25a1b2125c/pgvector-0.4.1-py3-none-any.whl", hash = "sha256:34bb4e99e1b13d08a2fe82dda9f860f15ddcd0166fbb25bffe15821cbfeb7362", size = 27086, upload-time = "2025-04-26T18:56:35.956Z" }, +] + +[[package]] +name = "pillow" +version = "11.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523", size = 47113069, upload-time = "2025-07-01T09:16:30.666Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722", size = 5316531, upload-time = "2025-07-01T09:13:59.203Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288", size = 4686560, upload-time = "2025-07-01T09:14:01.101Z" }, + { url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d", size = 5870978, upload-time = "2025-07-03T13:09:55.638Z" }, + { url = "https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494", size = 7641168, upload-time = "2025-07-03T13:10:00.37Z" }, + { url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58", size = 5973053, upload-time = "2025-07-01T09:14:04.491Z" }, + { url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f", size = 6640273, upload-time = "2025-07-01T09:14:06.235Z" }, + { url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e", size = 6082043, upload-time = "2025-07-01T09:14:07.978Z" }, + { url = "https://files.pythonhosted.org/packages/3a/04/ba8f2b11fc80d2dd462d7abec16351b45ec99cbbaea4387648a44190351a/pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94", size = 6715516, upload-time = "2025-07-01T09:14:10.233Z" }, + { url = "https://files.pythonhosted.org/packages/48/59/8cd06d7f3944cc7d892e8533c56b0acb68399f640786313275faec1e3b6f/pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0", size = 6274768, upload-time = "2025-07-01T09:14:11.921Z" }, + { url = "https://files.pythonhosted.org/packages/f1/cc/29c0f5d64ab8eae20f3232da8f8571660aa0ab4b8f1331da5c2f5f9a938e/pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac", size = 6986055, upload-time = "2025-07-01T09:14:13.623Z" }, + { url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd", size = 2423079, upload-time = "2025-07-01T09:14:15.268Z" }, + { url = "https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800, upload-time = "2025-07-01T09:14:17.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296, upload-time = "2025-07-01T09:14:19.828Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1e/b9e12bbe6e4c2220effebc09ea0923a07a6da1e1f1bfbc8d7d29a01ce32b/pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d", size = 5871726, upload-time = "2025-07-03T13:10:04.448Z" }, + { url = "https://files.pythonhosted.org/packages/8d/33/e9200d2bd7ba00dc3ddb78df1198a6e80d7669cce6c2bdbeb2530a74ec58/pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6", size = 7644652, upload-time = "2025-07-03T13:10:10.391Z" }, + { url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787, upload-time = "2025-07-01T09:14:21.63Z" }, + { url = "https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236, upload-time = "2025-07-01T09:14:23.321Z" }, + { url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950, upload-time = "2025-07-01T09:14:25.237Z" }, + { url = "https://files.pythonhosted.org/packages/0b/1a/7cff92e695a2a29ac1958c2a0fe4c0b2393b60aac13b04a4fe2735cad52d/pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d", size = 6723358, upload-time = "2025-07-01T09:14:27.053Z" }, + { url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149", size = 6275079, upload-time = "2025-07-01T09:14:30.104Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d", size = 6986324, upload-time = "2025-07-01T09:14:31.899Z" }, + { url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067, upload-time = "2025-07-01T09:14:33.709Z" }, + { url = "https://files.pythonhosted.org/packages/1e/93/0952f2ed8db3a5a4c7a11f91965d6184ebc8cd7cbb7941a260d5f018cd2d/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd", size = 2128328, upload-time = "2025-07-01T09:14:35.276Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/e8/100c3d114b1a0bf4042f27e0f87d2f25e857e838034e98ca98fe7b8c0a9c/pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8", size = 2170652, upload-time = "2025-07-01T09:14:37.203Z" }, + { url = "https://files.pythonhosted.org/packages/aa/86/3f758a28a6e381758545f7cdb4942e1cb79abd271bea932998fc0db93cb6/pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f", size = 2227443, upload-time = "2025-07-01T09:14:39.344Z" }, + { url = "https://files.pythonhosted.org/packages/01/f4/91d5b3ffa718df2f53b0dc109877993e511f4fd055d7e9508682e8aba092/pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c", size = 5278474, upload-time = "2025-07-01T09:14:41.843Z" }, + { url = "https://files.pythonhosted.org/packages/f9/0e/37d7d3eca6c879fbd9dba21268427dffda1ab00d4eb05b32923d4fbe3b12/pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd", size = 4686038, upload-time = "2025-07-01T09:14:44.008Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b0/3426e5c7f6565e752d81221af9d3676fdbb4f352317ceafd42899aaf5d8a/pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e", size = 5864407, upload-time = "2025-07-03T13:10:15.628Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c1/c6c423134229f2a221ee53f838d4be9d82bab86f7e2f8e75e47b6bf6cd77/pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1", size = 7639094, upload-time = "2025-07-03T13:10:21.857Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c9/09e6746630fe6372c67c648ff9deae52a2bc20897d51fa293571977ceb5d/pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805", size = 5973503, upload-time = "2025-07-01T09:14:45.698Z" }, + { url = "https://files.pythonhosted.org/packages/d5/1c/a2a29649c0b1983d3ef57ee87a66487fdeb45132df66ab30dd37f7dbe162/pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8", size = 6642574, upload-time = "2025-07-01T09:14:47.415Z" }, + { url = "https://files.pythonhosted.org/packages/36/de/d5cc31cc4b055b6c6fd990e3e7f0f8aaf36229a2698501bcb0cdf67c7146/pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2", size = 6084060, upload-time = "2025-07-01T09:14:49.636Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ea/502d938cbaeec836ac28a9b730193716f0114c41325db428e6b280513f09/pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b", size = 6721407, upload-time = "2025-07-01T09:14:51.962Z" }, + { url = "https://files.pythonhosted.org/packages/45/9c/9c5e2a73f125f6cbc59cc7087c8f2d649a7ae453f83bd0362ff7c9e2aee2/pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3", size = 6273841, upload-time = "2025-07-01T09:14:54.142Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/85/397c73524e0cd212067e0c969aa245b01d50183439550d24d9f55781b776/pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51", size = 6978450, upload-time = "2025-07-01T09:14:56.436Z" }, + { url = "https://files.pythonhosted.org/packages/17/d2/622f4547f69cd173955194b78e4d19ca4935a1b0f03a302d655c9f6aae65/pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580", size = 2423055, upload-time = "2025-07-01T09:14:58.072Z" }, + { url = "https://files.pythonhosted.org/packages/dd/80/a8a2ac21dda2e82480852978416cfacd439a4b490a501a288ecf4fe2532d/pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e", size = 5281110, upload-time = "2025-07-01T09:14:59.79Z" }, + { url = "https://files.pythonhosted.org/packages/44/d6/b79754ca790f315918732e18f82a8146d33bcd7f4494380457ea89eb883d/pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d", size = 4689547, upload-time = "2025-07-01T09:15:01.648Z" }, + { url = "https://files.pythonhosted.org/packages/49/20/716b8717d331150cb00f7fdd78169c01e8e0c219732a78b0e59b6bdb2fd6/pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced", size = 5901554, upload-time = "2025-07-03T13:10:27.018Z" }, + { url = "https://files.pythonhosted.org/packages/74/cf/a9f3a2514a65bb071075063a96f0a5cf949c2f2fce683c15ccc83b1c1cab/pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c", size = 7669132, upload-time = "2025-07-03T13:10:33.01Z" }, + { url = "https://files.pythonhosted.org/packages/98/3c/da78805cbdbee9cb43efe8261dd7cc0b4b93f2ac79b676c03159e9db2187/pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8", size = 6005001, upload-time = "2025-07-01T09:15:03.365Z" }, + { url = "https://files.pythonhosted.org/packages/6c/fa/ce044b91faecf30e635321351bba32bab5a7e034c60187fe9698191aef4f/pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59", size = 6668814, upload-time = "2025-07-01T09:15:05.655Z" }, + { url = "https://files.pythonhosted.org/packages/7b/51/90f9291406d09bf93686434f9183aba27b831c10c87746ff49f127ee80cb/pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe", size = 6113124, upload-time = "2025-07-01T09:15:07.358Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5a/6fec59b1dfb619234f7636d4157d11fb4e196caeee220232a8d2ec48488d/pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c", size = 6747186, upload-time = "2025-07-01T09:15:09.317Z" }, + { url = "https://files.pythonhosted.org/packages/49/6b/00187a044f98255225f172de653941e61da37104a9ea60e4f6887717e2b5/pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788", size = 6277546, upload-time = "2025-07-01T09:15:11.311Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/5c/6caaba7e261c0d75bab23be79f1d06b5ad2a2ae49f028ccec801b0e853d6/pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31", size = 6985102, upload-time = "2025-07-01T09:15:13.164Z" }, + { url = "https://files.pythonhosted.org/packages/f3/7e/b623008460c09a0cb38263c93b828c666493caee2eb34ff67f778b87e58c/pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e", size = 2424803, upload-time = "2025-07-01T09:15:15.695Z" }, + { url = "https://files.pythonhosted.org/packages/73/f4/04905af42837292ed86cb1b1dabe03dce1edc008ef14c473c5c7e1443c5d/pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12", size = 5278520, upload-time = "2025-07-01T09:15:17.429Z" }, + { url = "https://files.pythonhosted.org/packages/41/b0/33d79e377a336247df6348a54e6d2a2b85d644ca202555e3faa0cf811ecc/pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a", size = 4686116, upload-time = "2025-07-01T09:15:19.423Z" }, + { url = "https://files.pythonhosted.org/packages/49/2d/ed8bc0ab219ae8768f529597d9509d184fe8a6c4741a6864fea334d25f3f/pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632", size = 5864597, upload-time = "2025-07-03T13:10:38.404Z" }, + { url = "https://files.pythonhosted.org/packages/b5/3d/b932bb4225c80b58dfadaca9d42d08d0b7064d2d1791b6a237f87f661834/pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673", size = 7638246, upload-time = "2025-07-03T13:10:44.987Z" }, + { url = "https://files.pythonhosted.org/packages/09/b5/0487044b7c096f1b48f0d7ad416472c02e0e4bf6919541b111efd3cae690/pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027", size = 5973336, upload-time = "2025-07-01T09:15:21.237Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2d/524f9318f6cbfcc79fbc004801ea6b607ec3f843977652fdee4857a7568b/pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77", size = 6642699, upload-time = "2025-07-01T09:15:23.186Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d2/a9a4f280c6aefedce1e8f615baaa5474e0701d86dd6f1dede66726462bbd/pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874", size = 6083789, upload-time = "2025-07-01T09:15:25.1Z" }, + { url = "https://files.pythonhosted.org/packages/fe/54/86b0cd9dbb683a9d5e960b66c7379e821a19be4ac5810e2e5a715c09a0c0/pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a", size = 6720386, upload-time = "2025-07-01T09:15:27.378Z" }, + { url = "https://files.pythonhosted.org/packages/e7/95/88efcaf384c3588e24259c4203b909cbe3e3c2d887af9e938c2022c9dd48/pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214", size = 6370911, upload-time = "2025-07-01T09:15:29.294Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/cc/934e5820850ec5eb107e7b1a72dd278140731c669f396110ebc326f2a503/pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635", size = 7117383, upload-time = "2025-07-01T09:15:31.128Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e9/9c0a616a71da2a5d163aa37405e8aced9a906d574b4a214bede134e731bc/pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6", size = 2511385, upload-time = "2025-07-01T09:15:33.328Z" }, + { url = "https://files.pythonhosted.org/packages/1a/33/c88376898aff369658b225262cd4f2659b13e8178e7534df9e6e1fa289f6/pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae", size = 5281129, upload-time = "2025-07-01T09:15:35.194Z" }, + { url = "https://files.pythonhosted.org/packages/1f/70/d376247fb36f1844b42910911c83a02d5544ebd2a8bad9efcc0f707ea774/pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653", size = 4689580, upload-time = "2025-07-01T09:15:37.114Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1c/537e930496149fbac69efd2fc4329035bbe2e5475b4165439e3be9cb183b/pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6", size = 5902860, upload-time = "2025-07-03T13:10:50.248Z" }, + { url = "https://files.pythonhosted.org/packages/bd/57/80f53264954dcefeebcf9dae6e3eb1daea1b488f0be8b8fef12f79a3eb10/pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36", size = 7670694, upload-time = "2025-07-03T13:10:56.432Z" }, + { url = "https://files.pythonhosted.org/packages/70/ff/4727d3b71a8578b4587d9c276e90efad2d6fe0335fd76742a6da08132e8c/pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b", size = 6005888, upload-time = "2025-07-01T09:15:39.436Z" }, + { url = "https://files.pythonhosted.org/packages/05/ae/716592277934f85d3be51d7256f3636672d7b1abfafdc42cf3f8cbd4b4c8/pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477", size = 6670330, upload-time = "2025-07-01T09:15:41.269Z" }, + { url = "https://files.pythonhosted.org/packages/e7/bb/7fe6cddcc8827b01b1a9766f5fdeb7418680744f9082035bdbabecf1d57f/pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50", size = 6114089, upload-time = "2025-07-01T09:15:43.13Z" }, + { url = "https://files.pythonhosted.org/packages/8b/f5/06bfaa444c8e80f1a8e4bff98da9c83b37b5be3b1deaa43d27a0db37ef84/pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b", size = 6748206, upload-time = "2025-07-01T09:15:44.937Z" }, + { url = "https://files.pythonhosted.org/packages/f0/77/bc6f92a3e8e6e46c0ca78abfffec0037845800ea38c73483760362804c41/pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12", size = 6377370, upload-time = "2025-07-01T09:15:46.673Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/82/3a721f7d69dca802befb8af08b7c79ebcab461007ce1c18bd91a5d5896f9/pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db", size = 7121500, upload-time = "2025-07-01T09:15:48.512Z" }, + { url = "https://files.pythonhosted.org/packages/89/c7/5572fa4a3f45740eaab6ae86fcdf7195b55beac1371ac8c619d880cfe948/pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa", size = 2512835, upload-time = "2025-07-01T09:15:50.399Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6", size = 5270566, upload-time = "2025-07-01T09:16:19.801Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438", size = 4654618, upload-time = "2025-07-01T09:16:21.818Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3", size = 4874248, upload-time = "2025-07-03T13:11:20.738Z" }, + { url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c", size = 6583963, upload-time = "2025-07-03T13:11:26.283Z" }, + { url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361", size = 4957170, upload-time = "2025-07-01T09:16:23.762Z" }, + { url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7", size = 5581505, upload-time = "2025-07-01T09:16:25.593Z" }, + { url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8", size = 6984598, upload-time = "2025-07-01T09:16:27.732Z" }, +] + +[[package]] +name = "pipecat-ai" +version = "0.0.91" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiofiles" }, + { name = "aiohttp" }, + { name = "audioop-lts", marker = "python_full_version >= '3.13'" }, + { name = "docstring-parser" }, + { name = "loguru" }, + { name = "markdown" }, + { name = "nltk" }, + { name = "numba" }, + { name = "numpy" }, + { name = "openai" }, + { name = "pillow" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "pyloudnorm" }, + { name = "resampy" }, + { name = "soxr" }, + { name = "wait-for2", marker = "python_full_version < '3.12'" }, +] +sdist = 
{ url = "https://files.pythonhosted.org/packages/a7/4c/9d3fabce446fa208a10009c6306cca4e032f591b22fb30a4db6c7fec6515/pipecat_ai-0.0.91.tar.gz", hash = "sha256:a21ce6fd062ffa4d4944e1a479418e3e1626429e346f077ff185da134c4e9bf3", size = 10700370, upload-time = "2025-10-22T02:11:52.816Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/4f/99336f563164b734b6849eefa6cf5bc26923b8b4ec6de0d85497a07008f6/pipecat_ai-0.0.91-py3-none-any.whl", hash = "sha256:2f70f22a4625d855ee5742cc7d654ea3ec7837fae9de3bd5cdea77a550080d14", size = 10381447, upload-time = "2025-10-22T02:11:50.436Z" }, +] + +[package.optional-dependencies] +cartesia = [ + { name = "cartesia" }, + { name = "websockets" }, +] +deepgram = [ + { name = "deepgram-sdk" }, +] +google = [ + { name = "google-cloud-speech" }, + { name = "google-cloud-texttospeech" }, + { name = "google-genai" }, + { name = "websockets" }, +] +groq = [ + { name = "groq" }, +] +runner = [ + { name = "fastapi" }, + { name = "pipecat-ai-small-webrtc-prebuilt" }, + { name = "python-dotenv" }, + { name = "uvicorn" }, +] +silero = [ + { name = "onnxruntime" }, +] +websocket = [ + { name = "fastapi" }, + { name = "websockets" }, +] + +[[package]] +name = "pipecat-ai-small-webrtc-prebuilt" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fastapi", extra = ["all"] }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9c/14/8484ab387169779d1a6aaba90e133a067f0810ba5c53c9d0e2b6707857a9/pipecat_ai_small_webrtc_prebuilt-2.0.0.tar.gz", hash = "sha256:ad43b0ff1d4afaeae25241b3a8c2c283896f879d1e5d585ed8ed159db123178d", size = 588667, upload-time = "2025-12-01T00:27:43.27Z" } + +[[package]] +name = "platformdirs" +version = "4.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "plugins-module" +version = "0.0.1" +source = { editable = "modules/plugins_module" } +dependencies = [ + { name = "auth-module" }, + { name = "authenticator" }, + { name = "common-module" }, + { name = "datasource" }, + { name = "db-repo-module" }, + { name = "dependency-injector" }, + { name = "user-management-module" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, +] + 
+[package.metadata] +requires-dist = [ + { name = "auth-module", editable = "modules/auth_module" }, + { name = "authenticator", editable = "plugins/authenticator" }, + { name = "common-module", editable = "modules/common_module" }, + { name = "datasource", editable = "plugins/datasource" }, + { name = "db-repo-module", editable = "modules/db_repo_module" }, + { name = "dependency-injector", specifier = ">=4.42.0,<5.0.0" }, + { name = "user-management-module", editable = "modules/user_management_module" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pytest", specifier = ">=8.3.3,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.24.0,<1.0.0" }, +] + +[[package]] +name = "pottery" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mmh3" }, + { name = "redis" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/c8/9fab08d57af56518d642cc596d4b2071d12e557c73dcc6b1352ba6a97b9e/pottery-3.0.1.tar.gz", hash = "sha256:c0e223ccfa72d620e4ba16e7e381ab6498745f875e6c939c6b396525e9aee112", size = 88997, upload-time = "2025-03-21T14:07:36.095Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/27/ec4077c1b525c3b8511ad35879b2ebd54de631fd2ac1409cc0c341ec88a0/pottery-3.0.1-py3-none-any.whl", hash = "sha256:22db1268d7b035800a8c1d225e3806f8b35748f1f5191cbcf364f06fbe11104d", size = 69380, upload-time = "2025-03-21T14:07:34.673Z" }, +] + +[[package]] +name = "pre-commit" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/29/7cf5bbc236333876e4b41f56e06857a87937ce4bf91e117a6991a2dbb02a/pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16", size = 193792, upload-time = "2025-08-09T18:56:14.651Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5b/a5/987a405322d78a73b66e39e4a90e4ef156fd7141bf71df987e50717c321b/pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8", size = 220965, upload-time = "2025-08-09T18:56:13.192Z" }, +] + +[[package]] +name = "product-analysis-module" +version = "0.1.0" +source = { editable = "modules/product_analysis_module" } +dependencies = [ + { name = "common-module" }, + { name = "db-repo-module" }, +] + +[package.metadata] +requires-dist = [ + { name = "common-module", editable = "modules/common_module" }, + { name = "db-repo-module", editable = "modules/db_repo_module" }, +] + +[[package]] +name = "prometheus-client" +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/53/3edb5d68ecf6b38fcbcc1ad28391117d2a322d9a1a3eff04bfdb184d8c3b/prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce", size = 80481, upload-time = "2025-09-18T20:47:25.043Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/db/14bafcb4af2139e046d03fd00dea7873e48eafe18b7d2797e73d6681f210/prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99", size = 61145, upload-time = "2025-09-18T20:47:23.875Z" }, +] + +[[package]] +name = "propcache" +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { 
url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, + { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, + { url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, upload-time = "2025-06-09T22:54:10.466Z" }, + { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, + { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, + { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", size = 213472, upload-time = "2025-06-09T22:54:15.232Z" }, + { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = "2025-06-09T22:54:18.512Z" }, + { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = 
"2025-06-09T22:54:19.947Z" }, + { url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = "2025-06-09T22:54:21.716Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" }, + { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = "2025-06-09T22:54:25.539Z" }, + { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" }, + { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, upload-time = "2025-06-09T22:54:28.241Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" }, + { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, + { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, + { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, + { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, + { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, + { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, + { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, + { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, + { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, + { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286, upload-time = "2025-06-09T22:54:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425, upload-time = "2025-06-09T22:54:55.642Z" }, + { url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846, upload-time = "2025-06-09T22:54:57.246Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871, upload-time = "2025-06-09T22:54:58.975Z" }, + { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720, upload-time = "2025-06-09T22:55:00.471Z" }, + { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203, upload-time = "2025-06-09T22:55:01.834Z" }, + { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365, upload-time = "2025-06-09T22:55:03.199Z" }, + { url = "https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016, upload-time = "2025-06-09T22:55:04.518Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596, upload-time = "2025-06-09T22:55:05.942Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977, upload-time = "2025-06-09T22:55:07.792Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220, upload-time = 
"2025-06-09T22:55:09.173Z" }, + { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642, upload-time = "2025-06-09T22:55:10.62Z" }, + { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789, upload-time = "2025-06-09T22:55:12.029Z" }, + { url = "https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880, upload-time = "2025-06-09T22:55:13.45Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220, upload-time = "2025-06-09T22:55:15.284Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678, upload-time = "2025-06-09T22:55:16.445Z" }, + { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560, upload-time = "2025-06-09T22:55:17.598Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676, upload-time = "2025-06-09T22:55:18.922Z" }, + { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701, upload-time = "2025-06-09T22:55:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934, upload-time = "2025-06-09T22:55:21.5Z" }, + { url = "https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316, upload-time = "2025-06-09T22:55:22.918Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619, upload-time = "2025-06-09T22:55:24.651Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896, upload-time = "2025-06-09T22:55:26.049Z" }, + { url = "https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111, upload-time = "2025-06-09T22:55:27.381Z" }, + { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334, upload-time = "2025-06-09T22:55:28.747Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026, upload-time = "2025-06-09T22:55:30.184Z" }, + { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724, upload-time = "2025-06-09T22:55:31.646Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868, upload-time = "2025-06-09T22:55:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322, upload-time = "2025-06-09T22:55:35.065Z" }, + { url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778, upload-time = "2025-06-09T22:55:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175, upload-time = "2025-06-09T22:55:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857, upload-time = "2025-06-09T22:55:39.687Z" }, + { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, +] + +[[package]] +name = "proto-plus" +version = "1.26.1" +source = { 
registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", size = 56142, upload-time = "2025-03-10T15:54:38.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", size = 50163, upload-time = "2025-03-10T15:54:37.335Z" }, +] + +[[package]] +name = "protobuf" +version = "5.29.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/29/d09e70352e4e88c9c7a198d5645d7277811448d76c23b00345670f7c8a38/protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84", size = 425226, upload-time = "2025-05-28T23:51:59.82Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/11/6e40e9fc5bba02988a214c07cf324595789ca7820160bfd1f8be96e48539/protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079", size = 422963, upload-time = "2025-05-28T23:51:41.204Z" }, + { url = "https://files.pythonhosted.org/packages/81/7f/73cefb093e1a2a7c3ffd839e6f9fcafb7a427d300c7f8aef9c64405d8ac6/protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc", size = 434818, upload-time = "2025-05-28T23:51:44.297Z" }, + { url = "https://files.pythonhosted.org/packages/dd/73/10e1661c21f139f2c6ad9b23040ff36fee624310dc28fba20d33fdae124c/protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671", size = 418091, upload-time = "2025-05-28T23:51:45.907Z" }, + { url = "https://files.pythonhosted.org/packages/6c/04/98f6f8cf5b07ab1294c13f34b4e69b3722bb609c5b701d6c169828f9f8aa/protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015", size = 319824, upload-time = "2025-05-28T23:51:47.545Z" }, + { url = "https://files.pythonhosted.org/packages/85/e4/07c80521879c2d15f321465ac24c70efe2381378c00bf5e56a0f4fbac8cd/protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61", size = 319942, upload-time = "2025-05-28T23:51:49.11Z" }, + { url = "https://files.pythonhosted.org/packages/7e/cc/7e77861000a0691aeea8f4566e5d3aa716f2b1dece4a24439437e41d3d25/protobuf-5.29.5-py3-none-any.whl", hash = "sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5", size = 172823, upload-time = "2025-05-28T23:51:58.157Z" }, +] + +[[package]] +name = "psutil" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/31/4723d756b59344b643542936e37a31d1d3204bcdc42a7daa8ee9eb06fb50/psutil-7.1.0.tar.gz", hash = "sha256:655708b3c069387c8b77b072fc429a57d0e214221d01c0a772df7dfedcb3bcd2", size = 497660, upload-time = "2025-09-17T20:14:52.902Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/62/ce4051019ee20ce0ed74432dd73a5bb087a6704284a470bb8adff69a0932/psutil-7.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:76168cef4397494250e9f4e73eb3752b146de1dd950040b29186d0cce1d5ca13", size = 245242, upload-time = "2025-09-17T20:14:56.126Z" }, + { url = "https://files.pythonhosted.org/packages/38/61/f76959fba841bf5b61123fbf4b650886dc4094c6858008b5bf73d9057216/psutil-7.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:5d007560c8c372efdff9e4579c2846d71de737e4605f611437255e81efcca2c5", size = 246682, upload-time = "2025-09-17T20:14:58.25Z" }, + { url = "https://files.pythonhosted.org/packages/88/7a/37c99d2e77ec30d63398ffa6a660450b8a62517cabe44b3e9bae97696e8d/psutil-7.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22e4454970b32472ce7deaa45d045b34d3648ce478e26a04c7e858a0a6e75ff3", size = 287994, upload-time = "2025-09-17T20:14:59.901Z" }, + { url = "https://files.pythonhosted.org/packages/9d/de/04c8c61232f7244aa0a4b9a9fbd63a89d5aeaf94b2fc9d1d16e2faa5cbb0/psutil-7.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c70e113920d51e89f212dd7be06219a9b88014e63a4cec69b684c327bc474e3", size = 291163, upload-time = "2025-09-17T20:15:01.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/58/c4f976234bf6d4737bc8c02a81192f045c307b72cf39c9e5c5a2d78927f6/psutil-7.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d4a113425c037300de3ac8b331637293da9be9713855c4fc9d2d97436d7259d", size = 293625, upload-time = "2025-09-17T20:15:04.492Z" }, + { url = "https://files.pythonhosted.org/packages/79/87/157c8e7959ec39ced1b11cc93c730c4fb7f9d408569a6c59dbd92ceb35db/psutil-7.1.0-cp37-abi3-win32.whl", hash = "sha256:09ad740870c8d219ed8daae0ad3b726d3bf9a028a198e7f3080f6a1888b99bca", size = 244812, upload-time = "2025-09-17T20:15:07.462Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e9/b44c4f697276a7a95b8e94d0e320a7bf7f3318521b23de69035540b39838/psutil-7.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:57f5e987c36d3146c0dd2528cd42151cf96cd359b9d67cfff836995cc5df9a3d", size = 247965, upload-time = "2025-09-17T20:15:09.673Z" }, + { url = "https://files.pythonhosted.org/packages/26/65/1070a6e3c036f39142c2820c4b52e9243246fcfc3f96239ac84472ba361e/psutil-7.1.0-cp37-abi3-win_arm64.whl", hash = "sha256:6937cb68133e7c97b6cc9649a570c9a18ba0efebed46d8c5dae4c07fa1b67a07", size = 244971, upload-time = "2025-09-17T20:15:12.262Z" }, +] + +[[package]] +name = "psycopg" +version = "3.2.10" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a9/f1/0258a123c045afaf3c3b60c22ccff077bceeb24b8dc2c593270899353bd0/psycopg-3.2.10.tar.gz", hash = "sha256:0bce99269d16ed18401683a8569b2c5abd94f72f8364856d56c0389bcd50972a", size = 160380, upload-time = "2025-09-08T09:13:37.775Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/90/422ffbbeeb9418c795dae2a768db860401446af0c6768bc061ce22325f58/psycopg-3.2.10-py3-none-any.whl", hash = "sha256:ab5caf09a9ec42e314a21f5216dbcceac528e0e05142e42eea83a3b28b320ac3", size = 206586, upload-time = "2025-09-08T09:07:50.121Z" }, +] + +[package.optional-dependencies] +binary = [ + { name = "psycopg-binary", marker = "implementation_name != 'pypy'" }, +] +pool = [ + { name = "psycopg-pool" }, +] + +[[package]] +name = "psycopg-binary" +version = "3.2.10" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/df/8c/f15bd09a0cc09f010c1462f1cb846d7d2706f0f6226ef8e953328243edcc/psycopg_binary-3.2.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db0eb06a19e4c64a08db0db80875ede44939af6a2afc281762c338fad5d6e547", size = 4002654, upload-time = "2025-09-08T09:08:49.779Z" }, + { url = "https://files.pythonhosted.org/packages/c9/df/9b7c9db70b624b96544560d062c27030a817e932f1fa803b58e25b26dcdd/psycopg_binary-3.2.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d922fdd49ed17c558b6b2f9ae2054c3d0cced2a34e079ce5a41c86904d0203f7", size = 4074650, upload-time = "2025-09-08T09:08:57.53Z" }, + { url = "https://files.pythonhosted.org/packages/6b/32/7aba5874e1dfd90bc3dcd26dd9200ae65e1e6e169230759dad60139f1b99/psycopg_binary-3.2.10-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d557a94cd6d2e775b3af6cc0bd0ff0d9d641820b5cc3060ccf1f5ca2bf971217", size = 4630536, upload-time = "2025-09-08T09:09:03.492Z" }, + { url = "https://files.pythonhosted.org/packages/7d/b1/a430d08b4eb28dc534181eb68a9c2a9e90b77c0e2933e338790534e7dce0/psycopg_binary-3.2.10-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:29b6bb87959515bc8b6abef10d8d23a9a681f03e48e9f0c8adb4b9fb7fa73f11", size = 4728387, upload-time = "2025-09-08T09:09:08.909Z" }, + { url = "https://files.pythonhosted.org/packages/1b/d4/26d0fa9e8e7c05f0338024d2822a3740fac6093999443ad54e164f154bcc/psycopg_binary-3.2.10-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1b29285474e3339d0840e1b5079fdb0481914108f92ec62de0c87ae333c60b24", size = 4413805, upload-time = "2025-09-08T09:09:13.704Z" }, + { url = "https://files.pythonhosted.org/packages/c9/f2/d05c037c02e2ac4cb1c5b895c6c82428b3eaa0c48d08767b771bc2ea155a/psycopg_binary-3.2.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:62590dd113d10cd9c08251cb80b32e2e8aaf01ece04a700322e776b1d216959f", size = 3886830, upload-time = "2025-09-08T09:09:18.102Z" }, + { url = "https://files.pythonhosted.org/packages/8f/84/db3dee4335cd80c56e173a5ffbda6d17a7a10eeed030378d9adf3ab19ea7/psycopg_binary-3.2.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:764a5b9b40ad371c55dfdf95374d89e44a82fd62272d4fceebea0adb8930e2fb", size = 3568543, upload-time = "2025-09-08T09:09:22.765Z" }, + { url = "https://files.pythonhosted.org/packages/1b/45/4117274f24b8d49b8a9c1cb60488bb172ac9e57b8f804726115c332d16f8/psycopg_binary-3.2.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bd3676a04970cf825d2c771b0c147f91182c5a3653e0dbe958e12383668d0f79", size = 3610614, upload-time = "2025-09-08T09:09:27.534Z" }, + { url = "https://files.pythonhosted.org/packages/3c/22/f1b294dfc8af32a96a363aa99c0ebb530fc1c372a424c54a862dcf77ef47/psycopg_binary-3.2.10-cp311-cp311-win_amd64.whl", hash = "sha256:646048f46192c8d23786cc6ef19f35b7488d4110396391e407eca695fdfe9dcd", size = 2888340, upload-time = "2025-09-08T09:09:32.696Z" }, + { url = "https://files.pythonhosted.org/packages/a6/34/91c127fdedf8b270b1e3acc9f849d07ee8b80194379590c6f48dcc842924/psycopg_binary-3.2.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1dee2f4d2adc9adacbfecf8254bd82f6ac95cff707e1b9b99aa721cd1ef16b47", size = 3983963, upload-time = "2025-09-08T09:09:38.454Z" }, + { url = "https://files.pythonhosted.org/packages/1e/03/1d10ce2bf70cf549a8019639dc0c49be03e41092901d4324371a968b8c01/psycopg_binary-3.2.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8b45e65383da9c4a42a56f817973e521e893f4faae897fe9f1a971f9fe799742", size = 4069171, upload-time = 
"2025-09-08T09:09:44.395Z" }, + { url = "https://files.pythonhosted.org/packages/4c/5e/39cb924d6e119145aa5fc5532f48e79c67e13a76675e9366c327098db7b5/psycopg_binary-3.2.10-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:484d2b1659afe0f8f1cef5ea960bb640e96fa864faf917086f9f833f5c7a8034", size = 4610780, upload-time = "2025-09-08T09:09:53.073Z" }, + { url = "https://files.pythonhosted.org/packages/20/05/5a1282ebc4e39f5890abdd4bb7edfe9d19e4667497a1793ad288a8b81826/psycopg_binary-3.2.10-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:3bb4046973264ebc8cb7e20a83882d68577c1f26a6f8ad4fe52e4468cd9a8eee", size = 4700479, upload-time = "2025-09-08T09:09:58.183Z" }, + { url = "https://files.pythonhosted.org/packages/af/7a/e1c06e558ca3f37b7e6b002e555ebcfce0bf4dee6f3ae589a7444e16ce17/psycopg_binary-3.2.10-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:14bcbcac0cab465d88b2581e43ec01af4b01c9833e663f1352e05cb41be19e44", size = 4391772, upload-time = "2025-09-08T09:10:04.406Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d6/56f449c86988c9a97dc6c5f31d3689cfe8aedb37f2a02bd3e3882465d385/psycopg_binary-3.2.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:70bb7f665587dfd79e69f48b34efe226149454d7aab138ed22d5431d703de2f6", size = 3858214, upload-time = "2025-09-08T09:10:09.693Z" }, + { url = "https://files.pythonhosted.org/packages/93/56/f9eed67c9a1701b1e315f3687ff85f2f22a0a7d0eae4505cff65ef2f2679/psycopg_binary-3.2.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d2fe9eaa367f6171ab1a21a7dcb335eb2398be7f8bb7e04a20e2260aedc6f782", size = 3528051, upload-time = "2025-09-08T09:10:13.423Z" }, + { url = "https://files.pythonhosted.org/packages/25/cc/636709c72540cb859566537c0a03e46c3d2c4c4c2e13f78df46b6c4082b3/psycopg_binary-3.2.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:299834cce3eec0c48aae5a5207fc8f0c558fd65f2ceab1a36693329847da956b", size = 3580117, upload-time = "2025-09-08T09:10:17.81Z" }, + { url = "https://files.pythonhosted.org/packages/c1/a8/a2c822fa06b0dbbb8ad4b0221da2534f77bac54332d2971dbf930f64be5a/psycopg_binary-3.2.10-cp312-cp312-win_amd64.whl", hash = "sha256:e037aac8dc894d147ef33056fc826ee5072977107a3fdf06122224353a057598", size = 2878872, upload-time = "2025-09-08T09:10:22.162Z" }, + { url = "https://files.pythonhosted.org/packages/3a/80/db840f7ebf948ab05b4793ad34d4da6ad251829d6c02714445ae8b5f1403/psycopg_binary-3.2.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:55b14f2402be027fe1568bc6c4d75ac34628ff5442a70f74137dadf99f738e3b", size = 3982057, upload-time = "2025-09-08T09:10:28.725Z" }, + { url = "https://files.pythonhosted.org/packages/2d/53/39308328bb8388b1ec3501a16128c5ada405f217c6d91b3d921b9f3c5604/psycopg_binary-3.2.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:43d803fb4e108a67c78ba58f3e6855437ca25d56504cae7ebbfbd8fce9b59247", size = 4066830, upload-time = "2025-09-08T09:10:34.083Z" }, + { url = "https://files.pythonhosted.org/packages/e7/5a/18e6f41b40c71197479468cb18703b2999c6e4ab06f9c05df3bf416a55d7/psycopg_binary-3.2.10-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:470594d303928ab72a1ffd179c9c7bde9d00f76711d6b0c28f8a46ddf56d9807", size = 4610747, upload-time = "2025-09-08T09:10:39.697Z" }, + { url = "https://files.pythonhosted.org/packages/be/ab/9198fed279aca238c245553ec16504179d21aad049958a2865d0aa797db4/psycopg_binary-3.2.10-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:a1d4e4d309049e3cb61269652a3ca56cb598da30ecd7eb8cea561e0d18bc1a43", size = 4700301, upload-time = "2025-09-08T09:10:44.715Z" }, + { url = "https://files.pythonhosted.org/packages/fc/0d/59024313b5e6c5da3e2a016103494c609d73a95157a86317e0f600c8acb3/psycopg_binary-3.2.10-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a92ff1c2cd79b3966d6a87e26ceb222ecd5581b5ae4b58961f126af806a861ed", size = 4392679, upload-time = "2025-09-08T09:10:49.106Z" }, + { url = "https://files.pythonhosted.org/packages/ff/47/21ef15d8a66e3a7a76a177f885173d27f0c5cbe39f5dd6eda9832d6b4e19/psycopg_binary-3.2.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac0365398947879c9827b319217096be727da16c94422e0eb3cf98c930643162", size = 3857881, upload-time = "2025-09-08T09:10:56.75Z" }, + { url = "https://files.pythonhosted.org/packages/af/35/c5e5402ccd40016f15d708bbf343b8cf107a58f8ae34d14dc178fdea4fd4/psycopg_binary-3.2.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:42ee399c2613b470a87084ed79b06d9d277f19b0457c10e03a4aef7059097abc", size = 3531135, upload-time = "2025-09-08T09:11:03.346Z" }, + { url = "https://files.pythonhosted.org/packages/e6/e2/9b82946859001fe5e546c8749991b8b3b283f40d51bdc897d7a8e13e0a5e/psycopg_binary-3.2.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2028073fc12cd70ba003309d1439c0c4afab4a7eee7653b8c91213064fffe12b", size = 3581813, upload-time = "2025-09-08T09:11:08.76Z" }, + { url = "https://files.pythonhosted.org/packages/c5/91/c10cfccb75464adb4781486e0014ecd7c2ad6decf6cbe0afd8db65ac2bc9/psycopg_binary-3.2.10-cp313-cp313-win_amd64.whl", hash = "sha256:8390db6d2010ffcaf7f2b42339a2da620a7125d37029c1f9b72dfb04a8e7be6f", size = 2881466, upload-time = "2025-09-08T09:11:14.078Z" }, + { url = "https://files.pythonhosted.org/packages/fd/89/b0702ba0d007cc787dd7a205212c8c8cae229d1e7214c8e27bdd3b13d33e/psycopg_binary-3.2.10-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b34c278a58aa79562afe7f45e0455b1f4cad5974fc3d5674cc5f1f9f57e97fc5", size = 3981253, upload-time = "2025-09-08T09:11:19.864Z" }, + { url = "https://files.pythonhosted.org/packages/dc/c9/e51ac72ac34d1d8ea7fd861008ad8de60e56997f5bd3fbae7536570f6f58/psycopg_binary-3.2.10-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:810f65b9ef1fe9dddb5c05937884ea9563aaf4e1a2c3d138205231ed5f439511", size = 4067542, upload-time = "2025-09-08T09:11:25.366Z" }, + { url = "https://files.pythonhosted.org/packages/d6/27/49625c79ae89959a070c1fb63ebb5c6eed426fa09e15086b6f5b626fcdc2/psycopg_binary-3.2.10-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8923487c3898c65e1450847e15d734bb2e6adbd2e79d2d1dd5ad829a1306bdc0", size = 4615338, upload-time = "2025-09-08T09:11:31.079Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0d/9fdb5482f50f56303770ea8a3b1c1f32105762da731c7e2a4f425e0b3887/psycopg_binary-3.2.10-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7950ff79df7a453ac8a7d7a74694055b6c15905b0a2b6e3c99eb59c51a3f9bf7", size = 4703401, upload-time = "2025-09-08T09:11:38.718Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f3/eb2f75ca2c090bf1d0c90d6da29ef340876fe4533bcfc072a9fd94dd52b4/psycopg_binary-3.2.10-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0c2b95e83fda70ed2b0b4fadd8538572e4a4d987b721823981862d1ab56cc760", size = 4393458, upload-time = "2025-09-08T09:11:44.114Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/2e/887abe0591b2f1c1af31164b9efb46c5763e4418f403503bc9fbddaa02ef/psycopg_binary-3.2.10-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20384985fbc650c09a547a13c6d7f91bb42020d38ceafd2b68b7fc4a48a1f160", size = 3863733, upload-time = "2025-09-08T09:11:49.237Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8c/9446e3a84187220a98657ef778518f9b44eba55b1f6c3e8300d229ec9930/psycopg_binary-3.2.10-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:1f6982609b8ff8fcd67299b67cd5787da1876f3bb28fedd547262cfa8ddedf94", size = 3535121, upload-time = "2025-09-08T09:11:53.887Z" }, + { url = "https://files.pythonhosted.org/packages/b4/e1/f0382c956bfaa951a0dbd4d5a354acf093ef7e5219996958143dfd2bf37d/psycopg_binary-3.2.10-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bf30dcf6aaaa8d4779a20d2158bdf81cc8e84ce8eee595d748a7671c70c7b890", size = 3584235, upload-time = "2025-09-08T09:12:01.118Z" }, + { url = "https://files.pythonhosted.org/packages/5a/dd/464bd739bacb3b745a1c93bc15f20f0b1e27f0a64ec693367794b398673b/psycopg_binary-3.2.10-cp314-cp314-win_amd64.whl", hash = "sha256:d5c6a66a76022af41970bf19f51bc6bf87bd10165783dd1d40484bfd87d6b382", size = 2973554, upload-time = "2025-09-08T09:12:05.884Z" }, +] + +[[package]] +name = "psycopg-pool" +version = "3.2.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cf/13/1e7850bb2c69a63267c3dbf37387d3f71a00fd0e2fa55c5db14d64ba1af4/psycopg_pool-3.2.6.tar.gz", hash = "sha256:0f92a7817719517212fbfe2fd58b8c35c1850cdd2a80d36b581ba2085d9148e5", size = 29770, upload-time = "2025-02-26T12:03:47.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/fd/4feb52a55c1a4bd748f2acaed1903ab54a723c47f6d0242780f4d97104d4/psycopg_pool-3.2.6-py3-none-any.whl", hash = "sha256:5887318a9f6af906d041a0b1dc1c60f8f0dda8340c2572b74e10907b51ed5da7", size = 38252, upload-time = "2025-02-26T12:03:45.073Z" }, +] + +[[package]] +name = "psycopg2" +version = "2.9.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/62/51/2007ea29e605957a17ac6357115d0c1a1b60c8c984951c19419b3474cdfd/psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11", size = 385672, upload-time = "2024-10-16T11:24:54.832Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/a2/c51ca3e667c34e7852157b665e3d49418e68182081060231d514dd823225/psycopg2-2.9.10-cp311-cp311-win32.whl", hash = "sha256:47c4f9875125344f4c2b870e41b6aad585901318068acd01de93f3677a6522c2", size = 1024538, upload-time = "2024-10-16T11:18:33.48Z" }, + { url = "https://files.pythonhosted.org/packages/33/39/5a9a229bb5414abeb86e33b8fc8143ab0aecce5a7f698a53e31367d30caa/psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4", size = 1163736, upload-time = "2024-10-16T11:18:36.616Z" }, + { url = "https://files.pythonhosted.org/packages/3d/16/4623fad6076448df21c1a870c93a9774ad8a7b4dd1660223b59082dd8fec/psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067", size = 1025113, upload-time = "2024-10-16T11:18:40.148Z" }, + { url = "https://files.pythonhosted.org/packages/66/de/baed128ae0fc07460d9399d82e631ea31a1f171c0c4ae18f9808ac6759e3/psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = 
"sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e", size = 1163951, upload-time = "2024-10-16T11:18:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/ae/49/a6cfc94a9c483b1fa401fbcb23aca7892f60c7269c5ffa2ac408364f80dc/psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2", size = 2569060, upload-time = "2025-01-04T20:09:15.28Z" }, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, +] + +[[package]] +name = "pycparser" +version = "2.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, +] + +[[package]] +name = "pycryptodome" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/a6/8452177684d5e906854776276ddd34eca30d1b1e15aa1ee9cefc289a33f5/pycryptodome-3.23.0.tar.gz", hash = "sha256:447700a657182d60338bab09fdb27518f8856aecd80ae4c6bdddb67ff5da44ef", size = 4921276, upload-time = "2025-05-17T17:21:45.242Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/5d/bdb09489b63cd34a976cc9e2a8d938114f7a53a74d3dd4f125ffa49dce82/pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:0011f7f00cdb74879142011f95133274741778abba114ceca229adbf8e62c3e4", size = 2495152, upload-time = "2025-05-17T17:20:20.833Z" }, + { url = "https://files.pythonhosted.org/packages/a7/ce/7840250ed4cc0039c433cd41715536f926d6e86ce84e904068eb3244b6a6/pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:90460fc9e088ce095f9ee8356722d4f10f86e5be06e2354230a9880b9c549aae", size = 1639348, upload-time = "2025-05-17T17:20:23.171Z" }, + { url = "https://files.pythonhosted.org/packages/ee/f0/991da24c55c1f688d6a3b5a11940567353f74590734ee4a64294834ae472/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4764e64b269fc83b00f682c47443c2e6e85b18273712b98aa43bcb77f8570477", size = 2184033, upload-time = "2025-05-17T17:20:25.424Z" }, + { url = "https://files.pythonhosted.org/packages/54/16/0e11882deddf00f68b68dd4e8e442ddc30641f31afeb2bc25588124ac8de/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb8f24adb74984aa0e5d07a2368ad95276cf38051fe2dc6605cbcf482e04f2a7", size = 2270142, upload-time = "2025-05-17T17:20:27.808Z" }, + { url = "https://files.pythonhosted.org/packages/d5/fc/4347fea23a3f95ffb931f383ff28b3f7b1fe868739182cb76718c0da86a1/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d97618c9c6684a97ef7637ba43bdf6663a2e2e77efe0f863cce97a76af396446", size = 2309384, upload-time = "2025-05-17T17:20:30.765Z" }, + { url = "https://files.pythonhosted.org/packages/6e/d9/c5261780b69ce66d8cfab25d2797bd6e82ba0241804694cd48be41add5eb/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a53a4fe5cb075075d515797d6ce2f56772ea7e6a1e5e4b96cf78a14bac3d265", size = 2183237, upload-time = "2025-05-17T17:20:33.736Z" }, + { url = "https://files.pythonhosted.org/packages/5a/6f/3af2ffedd5cfa08c631f89452c6648c4d779e7772dfc388c77c920ca6bbf/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:763d1d74f56f031788e5d307029caef067febf890cd1f8bf61183ae142f1a77b", size = 2343898, upload-time = "2025-05-17T17:20:36.086Z" }, + { url = "https://files.pythonhosted.org/packages/9a/dc/9060d807039ee5de6e2f260f72f3d70ac213993a804f5e67e0a73a56dd2f/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:954af0e2bd7cea83ce72243b14e4fb518b18f0c1649b576d114973e2073b273d", size = 2269197, upload-time = "2025-05-17T17:20:38.414Z" }, + { url = "https://files.pythonhosted.org/packages/f9/34/e6c8ca177cb29dcc4967fef73f5de445912f93bd0343c9c33c8e5bf8cde8/pycryptodome-3.23.0-cp313-cp313t-win32.whl", hash = "sha256:257bb3572c63ad8ba40b89f6fc9d63a2a628e9f9708d31ee26560925ebe0210a", size = 1768600, upload-time = "2025-05-17T17:20:40.688Z" }, + { url = "https://files.pythonhosted.org/packages/e4/1d/89756b8d7ff623ad0160f4539da571d1f594d21ee6d68be130a6eccb39a4/pycryptodome-3.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6501790c5b62a29fcb227bd6b62012181d886a767ce9ed03b303d1f22eb5c625", size = 1799740, upload-time = "2025-05-17T17:20:42.413Z" }, + { url = "https://files.pythonhosted.org/packages/5d/61/35a64f0feaea9fd07f0d91209e7be91726eb48c0f1bfc6720647194071e4/pycryptodome-3.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9a77627a330ab23ca43b48b130e202582e91cc69619947840ea4d2d1be21eb39", size = 1703685, upload-time = "2025-05-17T17:20:44.388Z" }, + { url = "https://files.pythonhosted.org/packages/db/6c/a1f71542c969912bb0e106f64f60a56cc1f0fabecf9396f45accbe63fa68/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:187058ab80b3281b1de11c2e6842a357a1f71b42cb1e15bce373f3d238135c27", size = 2495627, upload-time = "2025-05-17T17:20:47.139Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/4e/a066527e079fc5002390c8acdd3aca431e6ea0a50ffd7201551175b47323/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:cfb5cd445280c5b0a4e6187a7ce8de5a07b5f3f897f235caa11f1f435f182843", size = 1640362, upload-time = "2025-05-17T17:20:50.392Z" }, + { url = "https://files.pythonhosted.org/packages/50/52/adaf4c8c100a8c49d2bd058e5b551f73dfd8cb89eb4911e25a0c469b6b4e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67bd81fcbe34f43ad9422ee8fd4843c8e7198dd88dd3d40e6de42ee65fbe1490", size = 2182625, upload-time = "2025-05-17T17:20:52.866Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e9/a09476d436d0ff1402ac3867d933c61805ec2326c6ea557aeeac3825604e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8987bd3307a39bc03df5c8e0e3d8be0c4c3518b7f044b0f4c15d1aa78f52575", size = 2268954, upload-time = "2025-05-17T17:20:55.027Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c5/ffe6474e0c551d54cab931918127c46d70cab8f114e0c2b5a3c071c2f484/pycryptodome-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa0698f65e5b570426fc31b8162ed4603b0c2841cbb9088e2b01641e3065915b", size = 2308534, upload-time = "2025-05-17T17:20:57.279Z" }, + { url = "https://files.pythonhosted.org/packages/18/28/e199677fc15ecf43010f2463fde4c1a53015d1fe95fb03bca2890836603a/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:53ecbafc2b55353edcebd64bf5da94a2a2cdf5090a6915bcca6eca6cc452585a", size = 2181853, upload-time = "2025-05-17T17:20:59.322Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ea/4fdb09f2165ce1365c9eaefef36625583371ee514db58dc9b65d3a255c4c/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:156df9667ad9f2ad26255926524e1c136d6664b741547deb0a86a9acf5ea631f", size = 2342465, upload-time = "2025-05-17T17:21:03.83Z" }, + { url = "https://files.pythonhosted.org/packages/22/82/6edc3fc42fe9284aead511394bac167693fb2b0e0395b28b8bedaa07ef04/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:dea827b4d55ee390dc89b2afe5927d4308a8b538ae91d9c6f7a5090f397af1aa", size = 2267414, upload-time = "2025-05-17T17:21:06.72Z" }, + { url = "https://files.pythonhosted.org/packages/59/fe/aae679b64363eb78326c7fdc9d06ec3de18bac68be4b612fc1fe8902693c/pycryptodome-3.23.0-cp37-abi3-win32.whl", hash = "sha256:507dbead45474b62b2bbe318eb1c4c8ee641077532067fec9c1aa82c31f84886", size = 1768484, upload-time = "2025-05-17T17:21:08.535Z" }, + { url = "https://files.pythonhosted.org/packages/54/2f/e97a1b8294db0daaa87012c24a7bb714147c7ade7656973fd6c736b484ff/pycryptodome-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:c75b52aacc6c0c260f204cbdd834f76edc9fb0d8e0da9fbf8352ef58202564e2", size = 1799636, upload-time = "2025-05-17T17:21:10.393Z" }, + { url = "https://files.pythonhosted.org/packages/18/3d/f9441a0d798bf2b1e645adc3265e55706aead1255ccdad3856dbdcffec14/pycryptodome-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:11eeeb6917903876f134b56ba11abe95c0b0fd5e3330def218083c7d98bbcb3c", size = 1703675, upload-time = "2025-05-17T17:21:13.146Z" }, +] + +[[package]] +name = "pydantic" +version = "2.11.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, +] + +[package.optional-dependencies] +email = [ + { name = "email-validator" }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, + { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, + { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, + { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, + { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + 
{ url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = 
"sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, + { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, + { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, + { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, + { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, + { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, + { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, +] + +[[package]] +name = "pydantic-extra-types" +version = "2.10.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/10/fb64987804cde41bcc39d9cd757cd5f2bb5d97b389d81aa70238b14b8a7e/pydantic_extra_types-2.10.6.tar.gz", hash = "sha256:c63d70bf684366e6bbe1f4ee3957952ebe6973d41e7802aea0b770d06b116aeb", size = 141858, upload-time = "2025-10-08T13:47:49.483Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/04/5c918669096da8d1c9ec7bb716bd72e755526103a61bc5e76a3e4fb23b53/pydantic_extra_types-2.10.6-py3-none-any.whl", hash = "sha256:6106c448316d30abf721b5b9fecc65e983ef2614399a24142d689c7546cc246a", size = 40949, upload-time = "2025-10-08T13:47:48.268Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" }, +] + +[[package]] +name = "pydub" +version = "0.25.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/9a/e6bca0eed82db26562c73b5076539a4a08d3cffd19c3cc5913a3e61145fd/pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f", size = 38326, upload-time = "2021-03-10T02:09:54.659Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/53/d78dc063216e62fc55f6b2eebb447f6a4b0a59f55c8406376f76bf959b08/pydub-0.25.1-py2.py3-none-any.whl", hash = 
"sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6", size = 32327, upload-time = "2021-03-10T02:09:53.503Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyjwt" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, +] + +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + +[[package]] +name = "pyloudnorm" +version = "0.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "future" }, + { name = "numpy" }, + { name = "scipy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/75/b5/39d59c44ecd828fabfdbd796b50a561e6543ca90ef440ab307374f107856/pyloudnorm-0.1.1.tar.gz", hash = "sha256:63cd4e197dea4e7795160ea08ed02d318091bce883e436a6dbc5963326b71e1e", size = 8588, upload-time = "2023-01-05T16:11:28.601Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/f5/6724805521ab4e723a12182f92374031032aff28a8a89dc8505c52b79032/pyloudnorm-0.1.1-py3-none-any.whl", hash = "sha256:d7f12ebdd097a464d87ce2878fc4d942f15f8233e26cc03f33fefa226f869a14", size = 9636, upload-time = "2023-01-05T16:11:27.331Z" }, +] + +[[package]] +name = "pymupdf" +version = "1.26.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/90/35/031556dfc0d332d8e9ed9b61ca105138606d3f8971b9eb02e20118629334/pymupdf-1.26.4.tar.gz", hash = "sha256:be13a066d42bfaed343a488168656637c4d9843ddc63b768dc827c9dfc6b9989", size = 83077563, upload-time = "2025-08-25T14:20:29.499Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/ae/3be722886cc7be2093585cd94f466db1199133ab005645a7a567b249560f/pymupdf-1.26.4-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:cb95562a0a63ce906fd788bdad5239063b63068cf4a991684f43acb09052cb99", size = 23061974, upload-time = "2025-08-25T14:16:58.811Z" }, + { url = "https://files.pythonhosted.org/packages/fc/b0/9a451d837e1fe18ecdbfbc34a6499f153c8a008763229cc634725383a93f/pymupdf-1.26.4-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:67e9e6b45832c33726651c2a031e9a20108fd9e759140b9e843f934de813a7ff", size = 22410112, upload-time = "2025-08-25T14:17:24.511Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/13/0916e8e02cb5453161fb9d9167c747d0a20d58633e30728645374153f815/pymupdf-1.26.4-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:2604f687dd02b6a1b98c81bd8becfc0024899a2d2085adfe3f9e91607721fd22", size = 23454948, upload-time = "2025-08-25T21:20:07.71Z" }, + { url = "https://files.pythonhosted.org/packages/4e/c6/d3cfafc75d383603884edeabe4821a549345df954a88d79e6764e2c87601/pymupdf-1.26.4-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:973a6dda61ebd34040e4df3753bf004b669017663fbbfdaa294d44eceba98de0", size = 24060686, upload-time = "2025-08-25T14:17:56.536Z" }, + { url = "https://files.pythonhosted.org/packages/72/08/035e9d22c801e801bba50c6745bc90ba8696a042fe2c68793e28bf0c3b07/pymupdf-1.26.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:299a49797df5b558e695647fa791329ba3911cbbb31ed65f24a6266c118ef1a7", size = 24265046, upload-time = "2025-08-25T14:18:21.238Z" }, + { url = "https://files.pythonhosted.org/packages/28/8c/c201e4846ec0fb6ae5d52aa3a5d66f9355f0c69fb94230265714df0de65e/pymupdf-1.26.4-cp39-abi3-win32.whl", hash = "sha256:51b38379aad8c71bd7a8dd24d93fbe7580c2a5d9d7e1f9cd29ebbba315aa1bd1", size = 17127332, upload-time = "2025-08-25T14:18:39.132Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c4/87d27b108c2f6d773aa5183c5ae367b2a99296ea4bc16eb79f453c679e30/pymupdf-1.26.4-cp39-abi3-win_amd64.whl", hash = "sha256:0b6345a93a9afd28de2567e433055e873205c52e6b920b129ca50e836a3aeec6", size = 18743491, upload-time = "2025-08-25T14:19:01.104Z" }, +] + +[[package]] +name = "pymupdf4llm" +version = "0.0.17" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pymupdf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/3c/1a530a410bdf76d83289bf30b3b86236d338b3f5f21842790c2cf7e9c1f6/pymupdf4llm-0.0.17.tar.gz", hash = "sha256:27287ef9fe0217cf37841a3ef2bcf70da2553c43d95ea39b664a6de6485678c3", size = 25180, upload-time = "2024-09-21T18:40:01.033Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/af/1576ecfc8a62d31c0c8b34b856e52f6b05f1d76546dbac0e1d037f044a9e/pymupdf4llm-0.0.17-py3-none-any.whl", hash = "sha256:26de9996945f15e3ca507908f80dc18a959f5b5214bb2e302c7f7034089665a0", size = 26190, upload-time = "2024-09-21T18:40:03.097Z" }, +] + +[[package]] +name = "pyparsing" +version = "3.2.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274, upload-time = "2025-09-21T04:11:06.277Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890, upload-time = "2025-09-21T04:11:04.117Z" }, +] + +[[package]] +name = "pypdf" +version = "4.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/65/2ed7c9e1d31d860f096061b3dd2d665f501e09faaa0409a3f0d719d2a16d/pypdf-4.3.1.tar.gz", hash = "sha256:b2f37fe9a3030aa97ca86067a56ba3f9d3565f9a791b305c7355d8392c30d91b", size = 293266, upload-time = "2024-07-21T19:35:20.207Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/60/eccdd92dd4af3e4bea6d6a342f7588c618a15b9bec4b968af581e498bcc4/pypdf-4.3.1-py3-none-any.whl", hash = 
"sha256:64b31da97eda0771ef22edb1bfecd5deee4b72c3d1736b7df2689805076d6418", size = 295825, upload-time = "2024-07-21T19:35:18.126Z" }, +] + +[[package]] +name = "pyreadline3" +version = "3.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839, upload-time = "2024-09-19T02:40:10.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178, upload-time = "2024-09-19T02:40:08.598Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "0.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8e/c4/453c52c659521066969523e87d85d54139bbd17b78f09532fb8eb8cdb58e/pytest_asyncio-0.26.0.tar.gz", hash = "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f", size = 54156, upload-time = "2025-03-25T06:22:28.883Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/7f/338843f449ace853647ace35870874f69a764d251872ed1b4de9f234822c/pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0", size = 19694, upload-time = "2025-03-25T06:22:27.807Z" }, +] + +[[package]] +name = "pytest-mock" +version = "3.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/14/eb014d26be205d38ad5ad20d9a80f7d201472e08167f0bb4361e251084a9/pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f", size = 34036, upload-time = "2025-09-16T16:37:27.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095, upload-time = "2025-09-16T16:37:25.734Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + +[[package]] +name = "python-jose" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ecdsa" }, + { name = "pyasn1" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/77/3a1c9039db7124eb039772b935f2244fbb73fc8ee65b9acf2375da1c07bf/python_jose-3.5.0.tar.gz", hash = "sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b", size = 92726, upload-time = "2025-05-28T17:31:54.288Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/c3/0bd11992072e6a1c513b16500a5d07f91a24017c5909b02c72c62d7ad024/python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771", size = 34624, upload-time = "2025-05-28T17:31:52.802Z" }, +] + +[package.optional-dependencies] +cryptography = [ + { name = "cryptography" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/0f/9c55ac6c84c0336e22a26fa84ca6c51d58d7ac3a2d78b0dfa8748826c883/python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026", size = 31516, upload-time = "2024-02-10T13:32:04.684Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/47/444768600d9e0ebc82f8e347775d24aef8f6348cf00e9fa0e81910814e6d/python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215", size = 22299, upload-time = "2024-02-10T13:32:02.969Z" }, +] + +[[package]] +name = "python-pptx" +version = "0.6.23" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "lxml" }, + { name = "pillow" }, + { name = "xlsxwriter" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/20/e7/aeaf794b2d440da609684494075e64cfada248026ecb265807d0668cdd00/python-pptx-0.6.23.tar.gz", hash = "sha256:587497ff28e779ab18dbb074f6d4052893c85dedc95ed75df319364f331fedee", size = 10083771, upload-time = "2023-11-02T21:35:31.525Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/72/49/6eee83072983473e9905ffddd5c2032b9a0ca4616425560d6d582287b467/python_pptx-0.6.23-py3-none-any.whl", hash = "sha256:dd0527194627a2b7cc05f3ba23ecaa2d9a0d5ac9b6193a28ed1b7a716f4217d4", size = 471575, upload-time = "2023-11-02T21:35:21.747Z" }, +] + +[[package]] +name = "pytz" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", 
hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = 
"sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = 
"sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "rag-ingestion" +version = "0.0.1" +source = { editable = "background_jobs/rag_ingestion" } +dependencies = [ + { name = "accelerate" }, + { name = "asyncpg" }, + { name = "db-repo-module" }, + { name = "flo-cloud" }, + { name = "flo-utils" }, + { name = "httpx" }, + { name = "pyjwt", extra = ["crypto"] }, + { name = "python-dotenv" }, + { name = "textract" }, + { name = "tiktoken" }, + { name = "torchvision" }, + { name = "transformers" }, +] + +[package.metadata] +requires-dist = [ + { name = "accelerate", specifier = ">=0.33.0,<1.0.0" }, + { name = "asyncpg", specifier = ">=0.30.0" }, + { name = "db-repo-module", editable = "modules/db_repo_module" }, + { name = "flo-cloud", editable = "packages/flo_cloud" }, + { name = "flo-utils", editable = "packages/flo_utils" }, + { name = "httpx", specifier = ">=0.28.1" }, + { name = "pyjwt", extras = ["crypto"], specifier = ">=2.9.0" }, + { name = "python-dotenv", specifier = ">=1.1.0,<2.0.0" }, + { name = "textract", specifier = ">=1.6.5" }, + { name = "tiktoken", specifier = ">=0.9.0" }, + { name = "torchvision", specifier = "==0.16.0" }, 
+ { name = "transformers", specifier = ">=4.45.0" }, +] + +[[package]] +name = "redis" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "async-timeout", marker = "python_full_version < '3.11.3'" }, + { name = "pyjwt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/cf/128b1b6d7086200c9f387bd4be9b2572a30b90745ef078bd8b235042dc9f/redis-5.3.1.tar.gz", hash = "sha256:ca49577a531ea64039b5a36db3d6cd1a0c7a60c34124d46924a45b956e8cf14c", size = 4626200, upload-time = "2025-07-25T08:06:27.778Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/26/5c5fa0e83c3621db835cfc1f1d789b37e7fa99ed54423b5f519beb931aa7/redis-5.3.1-py3-none-any.whl", hash = "sha256:dc1909bd24669cc31b5f67a039700b16ec30571096c5f1f0d9d2324bff31af97", size = 272833, upload-time = "2025-07-25T08:06:26.317Z" }, +] + +[[package]] +name = "redshift-connector" +version = "2.1.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "beautifulsoup4" }, + { name = "boto3" }, + { name = "botocore" }, + { name = "lxml" }, + { name = "packaging" }, + { name = "pytz" }, + { name = "requests" }, + { name = "scramp" }, + { name = "setuptools" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/2b/f7603f0acf01de0c9f7e4294c8a5ac346cc12cb93b81e52eaddd15d37400/redshift_connector-2.1.8-py3-none-any.whl", hash = "sha256:160dff3720e8facb5f50f9585f3f68dd5565dd0d986e4e6a879371313da1b36e", size = 139605, upload-time = "2025-07-01T22:20:49.294Z" }, +] + +[[package]] +name = "regex" +version = "2025.9.18" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/49/d3/eaa0d28aba6ad1827ad1e716d9a93e1ba963ada61887498297d3da715133/regex-2025.9.18.tar.gz", hash = "sha256:c5ba23274c61c6fef447ba6a39333297d0c247f53059dba0bca415cac511edc4", size = 400917, upload-time = "2025-09-19T00:38:35.79Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/61/80eda662fc4eb32bfedc331f42390974c9e89c7eac1b79cd9eea4d7c458c/regex-2025.9.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:51076980cd08cd13c88eb7365427ae27f0d94e7cebe9ceb2bb9ffdae8fc4d82a", size = 484832, upload-time = "2025-09-19T00:35:30.011Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d9/33833d9abddf3f07ad48504ddb53fe3b22f353214bbb878a72eee1e3ddbf/regex-2025.9.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:828446870bd7dee4e0cbeed767f07961aa07f0ea3129f38b3ccecebc9742e0b8", size = 288994, upload-time = "2025-09-19T00:35:31.733Z" }, + { url = "https://files.pythonhosted.org/packages/2a/b3/526ee96b0d70ea81980cbc20c3496fa582f775a52e001e2743cc33b2fa75/regex-2025.9.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c28821d5637866479ec4cc23b8c990f5bc6dd24e5e4384ba4a11d38a526e1414", size = 286619, upload-time = "2025-09-19T00:35:33.221Z" }, + { url = "https://files.pythonhosted.org/packages/65/4f/c2c096b02a351b33442aed5895cdd8bf87d372498d2100927c5a053d7ba3/regex-2025.9.18-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:726177ade8e481db669e76bf99de0b278783be8acd11cef71165327abd1f170a", size = 792454, upload-time = "2025-09-19T00:35:35.361Z" }, + { url = "https://files.pythonhosted.org/packages/24/15/b562c9d6e47c403c4b5deb744f8b4bf6e40684cf866c7b077960a925bdff/regex-2025.9.18-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f5cca697da89b9f8ea44115ce3130f6c54c22f541943ac8e9900461edc2b8bd4", size = 
858723, upload-time = "2025-09-19T00:35:36.949Z" }, + { url = "https://files.pythonhosted.org/packages/f2/01/dba305409849e85b8a1a681eac4c03ed327d8de37895ddf9dc137f59c140/regex-2025.9.18-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dfbde38f38004703c35666a1e1c088b778e35d55348da2b7b278914491698d6a", size = 905899, upload-time = "2025-09-19T00:35:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d0/c51d1e6a80eab11ef96a4cbad17fc0310cf68994fb01a7283276b7e5bbd6/regex-2025.9.18-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f2f422214a03fab16bfa495cfec72bee4aaa5731843b771860a471282f1bf74f", size = 798981, upload-time = "2025-09-19T00:35:40.416Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5e/72db90970887bbe02296612bd61b0fa31e6d88aa24f6a4853db3e96c575e/regex-2025.9.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a295916890f4df0902e4286bc7223ee7f9e925daa6dcdec4192364255b70561a", size = 781900, upload-time = "2025-09-19T00:35:42.077Z" }, + { url = "https://files.pythonhosted.org/packages/50/ff/596be45eea8e9bc31677fde243fa2904d00aad1b32c31bce26c3dbba0b9e/regex-2025.9.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5db95ff632dbabc8c38c4e82bf545ab78d902e81160e6e455598014f0abe66b9", size = 852952, upload-time = "2025-09-19T00:35:43.751Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1b/2dfa348fa551e900ed3f5f63f74185b6a08e8a76bc62bc9c106f4f92668b/regex-2025.9.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb967eb441b0f15ae610b7069bdb760b929f267efbf522e814bbbfffdf125ce2", size = 844355, upload-time = "2025-09-19T00:35:45.309Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/aefb1def27fe33b8cbbb19c75c13aefccfbef1c6686f8e7f7095705969c7/regex-2025.9.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f04d2f20da4053d96c08f7fde6e1419b7ec9dbcee89c96e3d731fca77f411b95", size = 787254, upload-time = "2025-09-19T00:35:46.904Z" }, + { url = "https://files.pythonhosted.org/packages/e3/4e/8ef042e7cf0dbbb401e784e896acfc1b367b95dfbfc9ada94c2ed55a081f/regex-2025.9.18-cp311-cp311-win32.whl", hash = "sha256:895197241fccf18c0cea7550c80e75f185b8bd55b6924fcae269a1a92c614a07", size = 264129, upload-time = "2025-09-19T00:35:48.597Z" }, + { url = "https://files.pythonhosted.org/packages/b4/7d/c4fcabf80dcdd6821c0578ad9b451f8640b9110fb3dcb74793dd077069ff/regex-2025.9.18-cp311-cp311-win_amd64.whl", hash = "sha256:7e2b414deae99166e22c005e154a5513ac31493db178d8aec92b3269c9cce8c9", size = 276160, upload-time = "2025-09-19T00:36:00.45Z" }, + { url = "https://files.pythonhosted.org/packages/64/f8/0e13c8ae4d6df9d128afaba138342d532283d53a4c1e7a8c93d6756c8f4a/regex-2025.9.18-cp311-cp311-win_arm64.whl", hash = "sha256:fb137ec7c5c54f34a25ff9b31f6b7b0c2757be80176435bf367111e3f71d72df", size = 268471, upload-time = "2025-09-19T00:36:02.149Z" }, + { url = "https://files.pythonhosted.org/packages/b0/99/05859d87a66ae7098222d65748f11ef7f2dff51bfd7482a4e2256c90d72b/regex-2025.9.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:436e1b31d7efd4dcd52091d076482031c611dde58bf9c46ca6d0a26e33053a7e", size = 486335, upload-time = "2025-09-19T00:36:03.661Z" }, + { url = "https://files.pythonhosted.org/packages/97/7e/d43d4e8b978890932cf7b0957fce58c5b08c66f32698f695b0c2c24a48bf/regex-2025.9.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c190af81e5576b9c5fdc708f781a52ff20f8b96386c6e2e0557a78402b029f4a", size = 289720, upload-time = "2025-09-19T00:36:05.471Z" 
}, + { url = "https://files.pythonhosted.org/packages/bb/3b/ff80886089eb5dcf7e0d2040d9aaed539e25a94300403814bb24cc775058/regex-2025.9.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e4121f1ce2b2b5eec4b397cc1b277686e577e658d8f5870b7eb2d726bd2300ab", size = 287257, upload-time = "2025-09-19T00:36:07.072Z" }, + { url = "https://files.pythonhosted.org/packages/ee/66/243edf49dd8720cba8d5245dd4d6adcb03a1defab7238598c0c97cf549b8/regex-2025.9.18-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:300e25dbbf8299d87205e821a201057f2ef9aa3deb29caa01cd2cac669e508d5", size = 797463, upload-time = "2025-09-19T00:36:08.399Z" }, + { url = "https://files.pythonhosted.org/packages/df/71/c9d25a1142c70432e68bb03211d4a82299cd1c1fbc41db9409a394374ef5/regex-2025.9.18-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7b47fcf9f5316c0bdaf449e879407e1b9937a23c3b369135ca94ebc8d74b1742", size = 862670, upload-time = "2025-09-19T00:36:10.101Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8f/329b1efc3a64375a294e3a92d43372bf1a351aa418e83c21f2f01cf6ec41/regex-2025.9.18-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:57a161bd3acaa4b513220b49949b07e252165e6b6dc910ee7617a37ff4f5b425", size = 910881, upload-time = "2025-09-19T00:36:12.223Z" }, + { url = "https://files.pythonhosted.org/packages/35/9e/a91b50332a9750519320ed30ec378b74c996f6befe282cfa6bb6cea7e9fd/regex-2025.9.18-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f130c3a7845ba42de42f380fff3c8aebe89a810747d91bcf56d40a069f15352", size = 802011, upload-time = "2025-09-19T00:36:13.901Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1d/6be3b8d7856b6e0d7ee7f942f437d0a76e0d5622983abbb6d21e21ab9a17/regex-2025.9.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f96fa342b6f54dcba928dd452e8d8cb9f0d63e711d1721cd765bb9f73bb048d", size = 786668, upload-time = "2025-09-19T00:36:15.391Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ce/4a60e53df58bd157c5156a1736d3636f9910bdcc271d067b32b7fcd0c3a8/regex-2025.9.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f0d676522d68c207828dcd01fb6f214f63f238c283d9f01d85fc664c7c85b56", size = 856578, upload-time = "2025-09-19T00:36:16.845Z" }, + { url = "https://files.pythonhosted.org/packages/86/e8/162c91bfe7217253afccde112868afb239f94703de6580fb235058d506a6/regex-2025.9.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:40532bff8a1a0621e7903ae57fce88feb2e8a9a9116d341701302c9302aef06e", size = 849017, upload-time = "2025-09-19T00:36:18.597Z" }, + { url = "https://files.pythonhosted.org/packages/35/34/42b165bc45289646ea0959a1bc7531733e90b47c56a72067adfe6b3251f6/regex-2025.9.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:039f11b618ce8d71a1c364fdee37da1012f5a3e79b1b2819a9f389cd82fd6282", size = 788150, upload-time = "2025-09-19T00:36:20.464Z" }, + { url = "https://files.pythonhosted.org/packages/79/5d/cdd13b1f3c53afa7191593a7ad2ee24092a5a46417725ffff7f64be8342d/regex-2025.9.18-cp312-cp312-win32.whl", hash = "sha256:e1dd06f981eb226edf87c55d523131ade7285137fbde837c34dc9d1bf309f459", size = 264536, upload-time = "2025-09-19T00:36:21.922Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f5/4a7770c9a522e7d2dc1fa3ffc83ab2ab33b0b22b447e62cffef186805302/regex-2025.9.18-cp312-cp312-win_amd64.whl", hash = "sha256:3d86b5247bf25fa3715e385aa9ff272c307e0636ce0c9595f64568b41f0a9c77", size 
= 275501, upload-time = "2025-09-19T00:36:23.4Z" }, + { url = "https://files.pythonhosted.org/packages/df/05/9ce3e110e70d225ecbed455b966003a3afda5e58e8aec2964042363a18f4/regex-2025.9.18-cp312-cp312-win_arm64.whl", hash = "sha256:032720248cbeeae6444c269b78cb15664458b7bb9ed02401d3da59fe4d68c3a5", size = 268601, upload-time = "2025-09-19T00:36:25.092Z" }, + { url = "https://files.pythonhosted.org/packages/d2/c7/5c48206a60ce33711cf7dcaeaed10dd737733a3569dc7e1dce324dd48f30/regex-2025.9.18-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2a40f929cd907c7e8ac7566ac76225a77701a6221bca937bdb70d56cb61f57b2", size = 485955, upload-time = "2025-09-19T00:36:26.822Z" }, + { url = "https://files.pythonhosted.org/packages/e9/be/74fc6bb19a3c491ec1ace943e622b5a8539068771e8705e469b2da2306a7/regex-2025.9.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c90471671c2cdf914e58b6af62420ea9ecd06d1554d7474d50133ff26ae88feb", size = 289583, upload-time = "2025-09-19T00:36:28.577Z" }, + { url = "https://files.pythonhosted.org/packages/25/c4/9ceaa433cb5dc515765560f22a19578b95b92ff12526e5a259321c4fc1a0/regex-2025.9.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a351aff9e07a2dabb5022ead6380cff17a4f10e4feb15f9100ee56c4d6d06af", size = 287000, upload-time = "2025-09-19T00:36:30.161Z" }, + { url = "https://files.pythonhosted.org/packages/7d/e6/68bc9393cb4dc68018456568c048ac035854b042bc7c33cb9b99b0680afa/regex-2025.9.18-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc4b8e9d16e20ddfe16430c23468a8707ccad3365b06d4536142e71823f3ca29", size = 797535, upload-time = "2025-09-19T00:36:31.876Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/ebae9032d34b78ecfe9bd4b5e6575b55351dc8513485bb92326613732b8c/regex-2025.9.18-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4b8cdbddf2db1c5e80338ba2daa3cfa3dec73a46fff2a7dda087c8efbf12d62f", size = 862603, upload-time = "2025-09-19T00:36:33.344Z" }, + { url = "https://files.pythonhosted.org/packages/3b/74/12332c54b3882557a4bcd2b99f8be581f5c6a43cf1660a85b460dd8ff468/regex-2025.9.18-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a276937d9d75085b2c91fb48244349c6954f05ee97bba0963ce24a9d915b8b68", size = 910829, upload-time = "2025-09-19T00:36:34.826Z" }, + { url = "https://files.pythonhosted.org/packages/86/70/ba42d5ed606ee275f2465bfc0e2208755b06cdabd0f4c7c4b614d51b57ab/regex-2025.9.18-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:92a8e375ccdc1256401c90e9dc02b8642894443d549ff5e25e36d7cf8a80c783", size = 802059, upload-time = "2025-09-19T00:36:36.664Z" }, + { url = "https://files.pythonhosted.org/packages/da/c5/fcb017e56396a7f2f8357412638d7e2963440b131a3ca549be25774b3641/regex-2025.9.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0dc6893b1f502d73037cf807a321cdc9be29ef3d6219f7970f842475873712ac", size = 786781, upload-time = "2025-09-19T00:36:38.168Z" }, + { url = "https://files.pythonhosted.org/packages/c6/ee/21c4278b973f630adfb3bcb23d09d83625f3ab1ca6e40ebdffe69901c7a1/regex-2025.9.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a61e85bfc63d232ac14b015af1261f826260c8deb19401c0597dbb87a864361e", size = 856578, upload-time = "2025-09-19T00:36:40.129Z" }, + { url = "https://files.pythonhosted.org/packages/87/0b/de51550dc7274324435c8f1539373ac63019b0525ad720132866fff4a16a/regex-2025.9.18-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:1ef86a9ebc53f379d921fb9a7e42b92059ad3ee800fcd9e0fe6181090e9f6c23", size = 849119, upload-time = "2025-09-19T00:36:41.651Z" }, + { url = "https://files.pythonhosted.org/packages/60/52/383d3044fc5154d9ffe4321696ee5b2ee4833a28c29b137c22c33f41885b/regex-2025.9.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d3bc882119764ba3a119fbf2bd4f1b47bc56c1da5d42df4ed54ae1e8e66fdf8f", size = 788219, upload-time = "2025-09-19T00:36:43.575Z" }, + { url = "https://files.pythonhosted.org/packages/20/bd/2614fc302671b7359972ea212f0e3a92df4414aaeacab054a8ce80a86073/regex-2025.9.18-cp313-cp313-win32.whl", hash = "sha256:3810a65675845c3bdfa58c3c7d88624356dd6ee2fc186628295e0969005f928d", size = 264517, upload-time = "2025-09-19T00:36:45.503Z" }, + { url = "https://files.pythonhosted.org/packages/07/0f/ab5c1581e6563a7bffdc1974fb2d25f05689b88e2d416525271f232b1946/regex-2025.9.18-cp313-cp313-win_amd64.whl", hash = "sha256:16eaf74b3c4180ede88f620f299e474913ab6924d5c4b89b3833bc2345d83b3d", size = 275481, upload-time = "2025-09-19T00:36:46.965Z" }, + { url = "https://files.pythonhosted.org/packages/49/22/ee47672bc7958f8c5667a587c2600a4fba8b6bab6e86bd6d3e2b5f7cac42/regex-2025.9.18-cp313-cp313-win_arm64.whl", hash = "sha256:4dc98ba7dd66bd1261927a9f49bd5ee2bcb3660f7962f1ec02617280fc00f5eb", size = 268598, upload-time = "2025-09-19T00:36:48.314Z" }, + { url = "https://files.pythonhosted.org/packages/e8/83/6887e16a187c6226cb85d8301e47d3b73ecc4505a3a13d8da2096b44fd76/regex-2025.9.18-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:fe5d50572bc885a0a799410a717c42b1a6b50e2f45872e2b40f4f288f9bce8a2", size = 489765, upload-time = "2025-09-19T00:36:49.996Z" }, + { url = "https://files.pythonhosted.org/packages/51/c5/e2f7325301ea2916ff301c8d963ba66b1b2c1b06694191df80a9c4fea5d0/regex-2025.9.18-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b9d9a2d6cda6621551ca8cf7a06f103adf72831153f3c0d982386110870c4d3", size = 291228, upload-time = "2025-09-19T00:36:51.654Z" }, + { url = "https://files.pythonhosted.org/packages/91/60/7d229d2bc6961289e864a3a3cfebf7d0d250e2e65323a8952cbb7e22d824/regex-2025.9.18-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:13202e4c4ac0ef9a317fff817674b293c8f7e8c68d3190377d8d8b749f566e12", size = 289270, upload-time = "2025-09-19T00:36:53.118Z" }, + { url = "https://files.pythonhosted.org/packages/3c/d7/b4f06868ee2958ff6430df89857fbf3d43014bbf35538b6ec96c2704e15d/regex-2025.9.18-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:874ff523b0fecffb090f80ae53dc93538f8db954c8bb5505f05b7787ab3402a0", size = 806326, upload-time = "2025-09-19T00:36:54.631Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e4/bca99034a8f1b9b62ccf337402a8e5b959dd5ba0e5e5b2ead70273df3277/regex-2025.9.18-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d13ab0490128f2bb45d596f754148cd750411afc97e813e4b3a61cf278a23bb6", size = 871556, upload-time = "2025-09-19T00:36:56.208Z" }, + { url = "https://files.pythonhosted.org/packages/6d/df/e06ffaf078a162f6dd6b101a5ea9b44696dca860a48136b3ae4a9caf25e2/regex-2025.9.18-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:05440bc172bc4b4b37fb9667e796597419404dbba62e171e1f826d7d2a9ebcef", size = 913817, upload-time = "2025-09-19T00:36:57.807Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/05/25b05480b63292fd8e84800b1648e160ca778127b8d2367a0a258fa2e225/regex-2025.9.18-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5514b8e4031fdfaa3d27e92c75719cbe7f379e28cacd939807289bce76d0e35a", size = 811055, upload-time = "2025-09-19T00:36:59.762Z" }, + { url = "https://files.pythonhosted.org/packages/70/97/7bc7574655eb651ba3a916ed4b1be6798ae97af30104f655d8efd0cab24b/regex-2025.9.18-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:65d3c38c39efce73e0d9dc019697b39903ba25b1ad45ebbd730d2cf32741f40d", size = 794534, upload-time = "2025-09-19T00:37:01.405Z" }, + { url = "https://files.pythonhosted.org/packages/b4/c2/d5da49166a52dda879855ecdba0117f073583db2b39bb47ce9a3378a8e9e/regex-2025.9.18-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ae77e447ebc144d5a26d50055c6ddba1d6ad4a865a560ec7200b8b06bc529368", size = 866684, upload-time = "2025-09-19T00:37:03.441Z" }, + { url = "https://files.pythonhosted.org/packages/bd/2d/0a5c4e6ec417de56b89ff4418ecc72f7e3feca806824c75ad0bbdae0516b/regex-2025.9.18-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e3ef8cf53dc8df49d7e28a356cf824e3623764e9833348b655cfed4524ab8a90", size = 853282, upload-time = "2025-09-19T00:37:04.985Z" }, + { url = "https://files.pythonhosted.org/packages/f4/8e/d656af63e31a86572ec829665d6fa06eae7e144771e0330650a8bb865635/regex-2025.9.18-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9feb29817df349c976da9a0debf775c5c33fc1c8ad7b9f025825da99374770b7", size = 797830, upload-time = "2025-09-19T00:37:06.697Z" }, + { url = "https://files.pythonhosted.org/packages/db/ce/06edc89df8f7b83ffd321b6071be4c54dc7332c0f77860edc40ce57d757b/regex-2025.9.18-cp313-cp313t-win32.whl", hash = "sha256:168be0d2f9b9d13076940b1ed774f98595b4e3c7fc54584bba81b3cc4181742e", size = 267281, upload-time = "2025-09-19T00:37:08.568Z" }, + { url = "https://files.pythonhosted.org/packages/83/9a/2b5d9c8b307a451fd17068719d971d3634ca29864b89ed5c18e499446d4a/regex-2025.9.18-cp313-cp313t-win_amd64.whl", hash = "sha256:d59ecf3bb549e491c8104fea7313f3563c7b048e01287db0a90485734a70a730", size = 278724, upload-time = "2025-09-19T00:37:10.023Z" }, + { url = "https://files.pythonhosted.org/packages/3d/70/177d31e8089a278a764f8ec9a3faac8d14a312d622a47385d4b43905806f/regex-2025.9.18-cp313-cp313t-win_arm64.whl", hash = "sha256:dbef80defe9fb21310948a2595420b36c6d641d9bea4c991175829b2cc4bc06a", size = 269771, upload-time = "2025-09-19T00:37:13.041Z" }, + { url = "https://files.pythonhosted.org/packages/44/b7/3b4663aa3b4af16819f2ab6a78c4111c7e9b066725d8107753c2257448a5/regex-2025.9.18-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c6db75b51acf277997f3adcd0ad89045d856190d13359f15ab5dda21581d9129", size = 486130, upload-time = "2025-09-19T00:37:14.527Z" }, + { url = "https://files.pythonhosted.org/packages/80/5b/4533f5d7ac9c6a02a4725fe8883de2aebc713e67e842c04cf02626afb747/regex-2025.9.18-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8f9698b6f6895d6db810e0bda5364f9ceb9e5b11328700a90cae573574f61eea", size = 289539, upload-time = "2025-09-19T00:37:16.356Z" }, + { url = "https://files.pythonhosted.org/packages/b8/8d/5ab6797c2750985f79e9995fad3254caa4520846580f266ae3b56d1cae58/regex-2025.9.18-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29cd86aa7cb13a37d0f0d7c21d8d949fe402ffa0ea697e635afedd97ab4b69f1", size = 287233, upload-time = "2025-09-19T00:37:18.025Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/1e/95afcb02ba8d3a64e6ffeb801718ce73471ad6440c55d993f65a4a5e7a92/regex-2025.9.18-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7c9f285a071ee55cd9583ba24dde006e53e17780bb309baa8e4289cd472bcc47", size = 797876, upload-time = "2025-09-19T00:37:19.609Z" }, + { url = "https://files.pythonhosted.org/packages/c8/fb/720b1f49cec1f3b5a9fea5b34cd22b88b5ebccc8c1b5de9cc6f65eed165a/regex-2025.9.18-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5adf266f730431e3be9021d3e5b8d5ee65e563fec2883ea8093944d21863b379", size = 863385, upload-time = "2025-09-19T00:37:21.65Z" }, + { url = "https://files.pythonhosted.org/packages/a9/ca/e0d07ecf701e1616f015a720dc13b84c582024cbfbb3fc5394ae204adbd7/regex-2025.9.18-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1137cabc0f38807de79e28d3f6e3e3f2cc8cfb26bead754d02e6d1de5f679203", size = 910220, upload-time = "2025-09-19T00:37:23.723Z" }, + { url = "https://files.pythonhosted.org/packages/b6/45/bba86413b910b708eca705a5af62163d5d396d5f647ed9485580c7025209/regex-2025.9.18-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7cc9e5525cada99699ca9223cce2d52e88c52a3d2a0e842bd53de5497c604164", size = 801827, upload-time = "2025-09-19T00:37:25.684Z" }, + { url = "https://files.pythonhosted.org/packages/b8/a6/740fbd9fcac31a1305a8eed30b44bf0f7f1e042342be0a4722c0365ecfca/regex-2025.9.18-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:bbb9246568f72dce29bcd433517c2be22c7791784b223a810225af3b50d1aafb", size = 786843, upload-time = "2025-09-19T00:37:27.62Z" }, + { url = "https://files.pythonhosted.org/packages/80/a7/0579e8560682645906da640c9055506465d809cb0f5415d9976f417209a6/regex-2025.9.18-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6a52219a93dd3d92c675383efff6ae18c982e2d7651c792b1e6d121055808743", size = 857430, upload-time = "2025-09-19T00:37:29.362Z" }, + { url = "https://files.pythonhosted.org/packages/8d/9b/4dc96b6c17b38900cc9fee254fc9271d0dde044e82c78c0811b58754fde5/regex-2025.9.18-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:ae9b3840c5bd456780e3ddf2f737ab55a79b790f6409182012718a35c6d43282", size = 848612, upload-time = "2025-09-19T00:37:31.42Z" }, + { url = "https://files.pythonhosted.org/packages/b3/6a/6f659f99bebb1775e5ac81a3fb837b85897c1a4ef5acffd0ff8ffe7e67fb/regex-2025.9.18-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d488c236ac497c46a5ac2005a952c1a0e22a07be9f10c3e735bc7d1209a34773", size = 787967, upload-time = "2025-09-19T00:37:34.019Z" }, + { url = "https://files.pythonhosted.org/packages/61/35/9e35665f097c07cf384a6b90a1ac11b0b1693084a0b7a675b06f760496c6/regex-2025.9.18-cp314-cp314-win32.whl", hash = "sha256:0c3506682ea19beefe627a38872d8da65cc01ffa25ed3f2e422dffa1474f0788", size = 269847, upload-time = "2025-09-19T00:37:35.759Z" }, + { url = "https://files.pythonhosted.org/packages/af/64/27594dbe0f1590b82de2821ebfe9a359b44dcb9b65524876cd12fabc447b/regex-2025.9.18-cp314-cp314-win_amd64.whl", hash = "sha256:57929d0f92bebb2d1a83af372cd0ffba2263f13f376e19b1e4fa32aec4efddc3", size = 278755, upload-time = "2025-09-19T00:37:37.367Z" }, + { url = "https://files.pythonhosted.org/packages/30/a3/0cd8d0d342886bd7d7f252d701b20ae1a3c72dc7f34ef4b2d17790280a09/regex-2025.9.18-cp314-cp314-win_arm64.whl", hash = "sha256:6a4b44df31d34fa51aa5c995d3aa3c999cec4d69b9bd414a8be51984d859f06d", size = 271873, upload-time = 
"2025-09-19T00:37:39.125Z" }, + { url = "https://files.pythonhosted.org/packages/99/cb/8a1ab05ecf404e18b54348e293d9b7a60ec2bd7aa59e637020c5eea852e8/regex-2025.9.18-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:b176326bcd544b5e9b17d6943f807697c0cb7351f6cfb45bf5637c95ff7e6306", size = 489773, upload-time = "2025-09-19T00:37:40.968Z" }, + { url = "https://files.pythonhosted.org/packages/93/3b/6543c9b7f7e734d2404fa2863d0d710c907bef99d4598760ed4563d634c3/regex-2025.9.18-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:0ffd9e230b826b15b369391bec167baed57c7ce39efc35835448618860995946", size = 291221, upload-time = "2025-09-19T00:37:42.901Z" }, + { url = "https://files.pythonhosted.org/packages/cd/91/e9fdee6ad6bf708d98c5d17fded423dcb0661795a49cba1b4ffb8358377a/regex-2025.9.18-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ec46332c41add73f2b57e2f5b642f991f6b15e50e9f86285e08ffe3a512ac39f", size = 289268, upload-time = "2025-09-19T00:37:44.823Z" }, + { url = "https://files.pythonhosted.org/packages/94/a6/bc3e8a918abe4741dadeaeb6c508e3a4ea847ff36030d820d89858f96a6c/regex-2025.9.18-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b80fa342ed1ea095168a3f116637bd1030d39c9ff38dc04e54ef7c521e01fc95", size = 806659, upload-time = "2025-09-19T00:37:46.684Z" }, + { url = "https://files.pythonhosted.org/packages/2b/71/ea62dbeb55d9e6905c7b5a49f75615ea1373afcad95830047e4e310db979/regex-2025.9.18-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4d97071c0ba40f0cf2a93ed76e660654c399a0a04ab7d85472239460f3da84b", size = 871701, upload-time = "2025-09-19T00:37:48.882Z" }, + { url = "https://files.pythonhosted.org/packages/6a/90/fbe9dedb7dad24a3a4399c0bae64bfa932ec8922a0a9acf7bc88db30b161/regex-2025.9.18-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0ac936537ad87cef9e0e66c5144484206c1354224ee811ab1519a32373e411f3", size = 913742, upload-time = "2025-09-19T00:37:51.015Z" }, + { url = "https://files.pythonhosted.org/packages/f0/1c/47e4a8c0e73d41eb9eb9fdeba3b1b810110a5139a2526e82fd29c2d9f867/regex-2025.9.18-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dec57f96d4def58c422d212d414efe28218d58537b5445cf0c33afb1b4768571", size = 811117, upload-time = "2025-09-19T00:37:52.686Z" }, + { url = "https://files.pythonhosted.org/packages/2a/da/435f29fddfd015111523671e36d30af3342e8136a889159b05c1d9110480/regex-2025.9.18-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:48317233294648bf7cd068857f248e3a57222259a5304d32c7552e2284a1b2ad", size = 794647, upload-time = "2025-09-19T00:37:54.626Z" }, + { url = "https://files.pythonhosted.org/packages/23/66/df5e6dcca25c8bc57ce404eebc7342310a0d218db739d7882c9a2b5974a3/regex-2025.9.18-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:274687e62ea3cf54846a9b25fc48a04459de50af30a7bd0b61a9e38015983494", size = 866747, upload-time = "2025-09-19T00:37:56.367Z" }, + { url = "https://files.pythonhosted.org/packages/82/42/94392b39b531f2e469b2daa40acf454863733b674481fda17462a5ffadac/regex-2025.9.18-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:a78722c86a3e7e6aadf9579e3b0ad78d955f2d1f1a8ca4f67d7ca258e8719d4b", size = 853434, upload-time = "2025-09-19T00:37:58.39Z" }, + { url = "https://files.pythonhosted.org/packages/a8/f8/dcc64c7f7bbe58842a8f89622b50c58c3598fbbf4aad0a488d6df2c699f1/regex-2025.9.18-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = 
"sha256:06104cd203cdef3ade989a1c45b6215bf42f8b9dd705ecc220c173233f7cba41", size = 798024, upload-time = "2025-09-19T00:38:00.397Z" }, + { url = "https://files.pythonhosted.org/packages/20/8d/edf1c5d5aa98f99a692313db813ec487732946784f8f93145e0153d910e5/regex-2025.9.18-cp314-cp314t-win32.whl", hash = "sha256:2e1eddc06eeaffd249c0adb6fafc19e2118e6308c60df9db27919e96b5656096", size = 273029, upload-time = "2025-09-19T00:38:02.383Z" }, + { url = "https://files.pythonhosted.org/packages/a7/24/02d4e4f88466f17b145f7ea2b2c11af3a942db6222429c2c146accf16054/regex-2025.9.18-cp314-cp314t-win_amd64.whl", hash = "sha256:8620d247fb8c0683ade51217b459cb4a1081c0405a3072235ba43a40d355c09a", size = 282680, upload-time = "2025-09-19T00:38:04.102Z" }, + { url = "https://files.pythonhosted.org/packages/1f/a3/c64894858aaaa454caa7cc47e2f225b04d3ed08ad649eacf58d45817fad2/regex-2025.9.18-cp314-cp314t-win_arm64.whl", hash = "sha256:b7531a8ef61de2c647cdf68b3229b071e46ec326b3138b2180acb4275f470b01", size = 273034, upload-time = "2025-09-19T00:38:05.807Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "resampy" +version = "0.4.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numba" }, + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/29/f1/34be702a69a5d272e844c98cee82351f880985cfbca0cc86378011078497/resampy-0.4.3.tar.gz", hash = "sha256:a0d1c28398f0e55994b739650afef4e3974115edbe96cd4bb81968425e916e47", size = 3080604, upload-time = "2024-03-05T20:36:08.119Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/b9/3b00ac340a1aab3389ebcc52c779914a44aadf7b0cb7a3bf053195735607/resampy-0.4.3-py3-none-any.whl", hash = "sha256:ad2ed64516b140a122d96704e32bc0f92b23f45419e8b8f478e5a05f83edcebd", size = 3076529, upload-time = "2024-03-05T20:36:02.439Z" }, +] + +[[package]] +name = "rich" +version = "14.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, +] + +[[package]] +name = "rich-toolkit" +version = "0.17.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" 
}, + { name = "rich" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/d0/8f8de36e1abf8339b497ce700dd7251ca465ffca4a1976969b0eaeb596fb/rich_toolkit-0.17.0.tar.gz", hash = "sha256:17ca7a32e613001aa0945ddea27a246f6de01dfc4c12403254c057a8ee542977", size = 187955, upload-time = "2025-11-27T11:10:24.863Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/42/ef2ed40699567661d03b0b511ac46cf6cee736de8f3666819c12d6d20696/rich_toolkit-0.17.0-py3-none-any.whl", hash = "sha256:06fb47a5c5259d6b480287cd38aff5f551b6e1a307f90ed592453dd360e4e71e", size = 31412, upload-time = "2025-11-27T11:10:23.847Z" }, +] + +[[package]] +name = "rignore" +version = "0.7.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/f5/8bed2310abe4ae04b67a38374a4d311dd85220f5d8da56f47ae9361be0b0/rignore-0.7.6.tar.gz", hash = "sha256:00d3546cd793c30cb17921ce674d2c8f3a4b00501cb0e3dd0e82217dbeba2671", size = 57140, upload-time = "2025-11-05T21:41:21.968Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/41/b6e2be3069ef3b7f24e35d2911bd6deb83d20ed5642ad81d5a6d1c015473/rignore-0.7.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:40be8226e12d6653abbebaffaea2885f80374c1c8f76fe5ca9e0cadd120a272c", size = 885285, upload-time = "2025-11-05T20:42:39.763Z" }, + { url = "https://files.pythonhosted.org/packages/52/66/ba7f561b6062402022887706a7f2b2c2e2e2a28f1e3839202b0a2f77e36d/rignore-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182f4e5e4064d947c756819446a7d4cdede8e756b8c81cf9e509683fe38778d7", size = 823882, upload-time = "2025-11-05T20:42:23.488Z" }, + { url = "https://files.pythonhosted.org/packages/f5/81/4087453df35a90b07370647b19017029324950c1b9137d54bf1f33843f17/rignore-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16b63047648a916a87be1e51bb5c009063f1b8b6f5afe4f04f875525507e63dc", size = 899362, upload-time = "2025-11-05T20:40:51.111Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c9/390a8fdfabb76d71416be773bd9f162977bd483084f68daf19da1dec88a6/rignore-0.7.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ba5524f5178deca4d7695e936604ebc742acb8958f9395776e1fcb8133f8257a", size = 873633, upload-time = "2025-11-05T20:41:06.193Z" }, + { url = "https://files.pythonhosted.org/packages/df/c9/79404fcb0faa76edfbc9df0901f8ef18568d1104919ebbbad6d608c888d1/rignore-0.7.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:62020dbb89a1dd4b84ab3d60547b3b2eb2723641d5fb198463643f71eaaed57d", size = 1167633, upload-time = "2025-11-05T20:41:22.491Z" }, + { url = "https://files.pythonhosted.org/packages/6e/8d/b3466d32d445d158a0aceb80919085baaae495b1f540fb942f91d93b5e5b/rignore-0.7.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b34acd532769d5a6f153a52a98dcb81615c949ab11697ce26b2eb776af2e174d", size = 941434, upload-time = "2025-11-05T20:41:38.151Z" }, + { url = "https://files.pythonhosted.org/packages/e8/40/9cd949761a7af5bc27022a939c91ff622d29c7a0b66d0c13a863097dde2d/rignore-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c5e53b752f9de44dff7b3be3c98455ce3bf88e69d6dc0cf4f213346c5e3416c", size = 959461, upload-time = "2025-11-05T20:42:08.476Z" }, + { url = "https://files.pythonhosted.org/packages/b5/87/1e1a145731f73bdb7835e11f80da06f79a00d68b370d9a847de979575e6d/rignore-0.7.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:25b3536d13a5d6409ce85f23936f044576eeebf7b6db1d078051b288410fc049", size = 985323, upload-time = "2025-11-05T20:41:52.735Z" }, + { url = "https://files.pythonhosted.org/packages/6c/31/1ecff992fc3f59c4fcdcb6c07d5f6c1e6dfb55ccda19c083aca9d86fa1c6/rignore-0.7.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e01cad2b0b92f6b1993f29fc01f23f2d78caf4bf93b11096d28e9d578eb08ce", size = 1079173, upload-time = "2025-11-05T21:40:12.007Z" }, + { url = "https://files.pythonhosted.org/packages/17/18/162eedadb4c2282fa4c521700dbf93c9b14b8842e8354f7d72b445b8d593/rignore-0.7.6-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5991e46ab9b4868334c9e372ab0892b0150f3f586ff2b1e314272caeb38aaedb", size = 1139012, upload-time = "2025-11-05T21:40:29.399Z" }, + { url = "https://files.pythonhosted.org/packages/78/96/a9ca398a8af74bb143ad66c2a31303c894111977e28b0d0eab03867f1b43/rignore-0.7.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6c8ae562e5d1246cba5eaeb92a47b2a279e7637102828dde41dcbe291f529a3e", size = 1118827, upload-time = "2025-11-05T21:40:46.6Z" }, + { url = "https://files.pythonhosted.org/packages/9f/22/1c1a65047df864def9a047dbb40bc0b580b8289a4280e62779cd61ae21f2/rignore-0.7.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aaf938530dcc0b47c4cfa52807aa2e5bfd5ca6d57a621125fe293098692f6345", size = 1128182, upload-time = "2025-11-05T21:41:04.239Z" }, + { url = "https://files.pythonhosted.org/packages/bd/f4/1526eb01fdc2235aca1fd9d0189bee4021d009a8dcb0161540238c24166e/rignore-0.7.6-cp311-cp311-win32.whl", hash = "sha256:166ebce373105dd485ec213a6a2695986346e60c94ff3d84eb532a237b24a4d5", size = 646547, upload-time = "2025-11-05T21:41:49.439Z" }, + { url = "https://files.pythonhosted.org/packages/7c/c8/dda0983e1845706beb5826459781549a840fe5a7eb934abc523e8cd17814/rignore-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:44f35ee844b1a8cea50d056e6a595190ce9d42d3cccf9f19d280ae5f3058973a", size = 727139, upload-time = "2025-11-05T21:41:34.367Z" }, + { url = "https://files.pythonhosted.org/packages/e3/47/eb1206b7bf65970d41190b879e1723fc6bbdb2d45e53565f28991a8d9d96/rignore-0.7.6-cp311-cp311-win_arm64.whl", hash = "sha256:14b58f3da4fa3d5c3fa865cab49821675371f5e979281c683e131ae29159a581", size = 657598, upload-time = "2025-11-05T21:41:23.758Z" }, + { url = "https://files.pythonhosted.org/packages/0b/0e/012556ef3047a2628842b44e753bb15f4dc46806780ff090f1e8fe4bf1eb/rignore-0.7.6-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:03e82348cb7234f8d9b2834f854400ddbbd04c0f8f35495119e66adbd37827a8", size = 883488, upload-time = "2025-11-05T20:42:41.359Z" }, + { url = "https://files.pythonhosted.org/packages/93/b0/d4f1f3fe9eb3f8e382d45ce5b0547ea01c4b7e0b4b4eb87bcd66a1d2b888/rignore-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9e624f6be6116ea682e76c5feb71ea91255c67c86cb75befe774365b2931961", size = 820411, upload-time = "2025-11-05T20:42:24.782Z" }, + { url = "https://files.pythonhosted.org/packages/4a/c8/dea564b36dedac8de21c18e1851789545bc52a0c22ece9843444d5608a6a/rignore-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bda49950d405aa8d0ebe26af807c4e662dd281d926530f03f29690a2e07d649a", size = 897821, upload-time = "2025-11-05T20:40:52.613Z" }, + { url = "https://files.pythonhosted.org/packages/b3/2b/ee96db17ac1835e024c5d0742eefb7e46de60020385ac883dd3d1cde2c1f/rignore-0.7.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5fd5ab3840b8c16851d327ed06e9b8be6459702a53e5ab1fc4073b684b3789e", size = 873963, upload-time = 
"2025-11-05T20:41:07.49Z" }, + { url = "https://files.pythonhosted.org/packages/a5/8c/ad5a57bbb9d14d5c7e5960f712a8a0b902472ea3f4a2138cbf70d1777b75/rignore-0.7.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ced2a248352636a5c77504cb755dc02c2eef9a820a44d3f33061ce1bb8a7f2d2", size = 1169216, upload-time = "2025-11-05T20:41:23.73Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/5b00bc2a6bc1701e6878fca798cf5d9125eb3113193e33078b6fc0d99123/rignore-0.7.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a04a3b73b75ddc12c9c9b21efcdaab33ca3832941d6f1d67bffd860941cd448a", size = 942942, upload-time = "2025-11-05T20:41:39.393Z" }, + { url = "https://files.pythonhosted.org/packages/85/e5/7f99bd0cc9818a91d0e8b9acc65b792e35750e3bdccd15a7ee75e64efca4/rignore-0.7.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d24321efac92140b7ec910ac7c53ab0f0c86a41133d2bb4b0e6a7c94967f44dd", size = 959787, upload-time = "2025-11-05T20:42:09.765Z" }, + { url = "https://files.pythonhosted.org/packages/55/54/2ffea79a7c1eabcede1926347ebc2a81bc6b81f447d05b52af9af14948b9/rignore-0.7.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73c7aa109d41e593785c55fdaa89ad80b10330affa9f9d3e3a51fa695f739b20", size = 984245, upload-time = "2025-11-05T20:41:54.062Z" }, + { url = "https://files.pythonhosted.org/packages/41/f7/e80f55dfe0f35787fa482aa18689b9c8251e045076c35477deb0007b3277/rignore-0.7.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1734dc49d1e9501b07852ef44421f84d9f378da9fbeda729e77db71f49cac28b", size = 1078647, upload-time = "2025-11-05T21:40:13.463Z" }, + { url = "https://files.pythonhosted.org/packages/d4/cf/2c64f0b6725149f7c6e7e5a909d14354889b4beaadddaa5fff023ec71084/rignore-0.7.6-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5719ea14ea2b652c0c0894be5dfde954e1853a80dea27dd2fbaa749618d837f5", size = 1139186, upload-time = "2025-11-05T21:40:31.27Z" }, + { url = "https://files.pythonhosted.org/packages/75/95/a86c84909ccc24af0d094b50d54697951e576c252a4d9f21b47b52af9598/rignore-0.7.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8e23424fc7ce35726854f639cb7968151a792c0c3d9d082f7f67e0c362cfecca", size = 1117604, upload-time = "2025-11-05T21:40:48.07Z" }, + { url = "https://files.pythonhosted.org/packages/7f/5e/13b249613fd5d18d58662490ab910a9f0be758981d1797789913adb4e918/rignore-0.7.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3efdcf1dd84d45f3e2bd2f93303d9be103888f56dfa7c3349b5bf4f0657ec696", size = 1127725, upload-time = "2025-11-05T21:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/c7/28/fa5dcd1e2e16982c359128664e3785f202d3eca9b22dd0b2f91c4b3d242f/rignore-0.7.6-cp312-cp312-win32.whl", hash = "sha256:ccca9d1a8b5234c76b71546fc3c134533b013f40495f394a65614a81f7387046", size = 646145, upload-time = "2025-11-05T21:41:51.096Z" }, + { url = "https://files.pythonhosted.org/packages/26/87/69387fb5dd81a0f771936381431780b8cf66fcd2cfe9495e1aaf41548931/rignore-0.7.6-cp312-cp312-win_amd64.whl", hash = "sha256:c96a285e4a8bfec0652e0bfcf42b1aabcdda1e7625f5006d188e3b1c87fdb543", size = 726090, upload-time = "2025-11-05T21:41:36.485Z" }, + { url = "https://files.pythonhosted.org/packages/24/5f/e8418108dcda8087fb198a6f81caadbcda9fd115d61154bf0df4d6d3619b/rignore-0.7.6-cp312-cp312-win_arm64.whl", hash = "sha256:a64a750e7a8277a323f01ca50b7784a764845f6cce2fe38831cb93f0508d0051", size = 656317, upload-time = "2025-11-05T21:41:25.305Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/8a/a4078f6e14932ac7edb171149c481de29969d96ddee3ece5dc4c26f9e0c3/rignore-0.7.6-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2bdab1d31ec9b4fb1331980ee49ea051c0d7f7bb6baa28b3125ef03cdc48fdaf", size = 883057, upload-time = "2025-11-05T20:42:42.741Z" }, + { url = "https://files.pythonhosted.org/packages/f9/8f/f8daacd177db4bf7c2223bab41e630c52711f8af9ed279be2058d2fe4982/rignore-0.7.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:90f0a00ce0c866c275bf888271f1dc0d2140f29b82fcf33cdbda1e1a6af01010", size = 820150, upload-time = "2025-11-05T20:42:26.545Z" }, + { url = "https://files.pythonhosted.org/packages/36/31/b65b837e39c3f7064c426754714ac633b66b8c2290978af9d7f513e14aa9/rignore-0.7.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1ad295537041dc2ed4b540fb1a3906bd9ede6ccdad3fe79770cd89e04e3c73c", size = 897406, upload-time = "2025-11-05T20:40:53.854Z" }, + { url = "https://files.pythonhosted.org/packages/ca/58/1970ce006c427e202ac7c081435719a076c478f07b3a23f469227788dc23/rignore-0.7.6-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f782dbd3a65a5ac85adfff69e5c6b101285ef3f845c3a3cae56a54bebf9fe116", size = 874050, upload-time = "2025-11-05T20:41:08.922Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/eb45db9f90137329072a732273be0d383cb7d7f50ddc8e0bceea34c1dfdf/rignore-0.7.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65cece3b36e5b0826d946494734c0e6aaf5a0337e18ff55b071438efe13d559e", size = 1167835, upload-time = "2025-11-05T20:41:24.997Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f1/6f1d72ddca41a64eed569680587a1236633587cc9f78136477ae69e2c88a/rignore-0.7.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7e4bb66c13cd7602dc8931822c02dfbbd5252015c750ac5d6152b186f0a8be0", size = 941945, upload-time = "2025-11-05T20:41:40.628Z" }, + { url = "https://files.pythonhosted.org/packages/48/6f/2f178af1c1a276a065f563ec1e11e7a9e23d4996fd0465516afce4b5c636/rignore-0.7.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297e500c15766e196f68aaaa70e8b6db85fa23fdc075b880d8231fdfba738cd7", size = 959067, upload-time = "2025-11-05T20:42:11.09Z" }, + { url = "https://files.pythonhosted.org/packages/5b/db/423a81c4c1e173877c7f9b5767dcaf1ab50484a94f60a0b2ed78be3fa765/rignore-0.7.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a07084211a8d35e1a5b1d32b9661a5ed20669970b369df0cf77da3adea3405de", size = 984438, upload-time = "2025-11-05T20:41:55.443Z" }, + { url = "https://files.pythonhosted.org/packages/31/eb/c4f92cc3f2825d501d3c46a244a671eb737fc1bcf7b05a3ecd34abb3e0d7/rignore-0.7.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:181eb2a975a22256a1441a9d2f15eb1292839ea3f05606620bd9e1938302cf79", size = 1078365, upload-time = "2025-11-05T21:40:15.148Z" }, + { url = "https://files.pythonhosted.org/packages/26/09/99442f02794bd7441bfc8ed1c7319e890449b816a7493b2db0e30af39095/rignore-0.7.6-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:7bbcdc52b5bf9f054b34ce4af5269df5d863d9c2456243338bc193c28022bd7b", size = 1139066, upload-time = "2025-11-05T21:40:32.771Z" }, + { url = "https://files.pythonhosted.org/packages/2c/88/bcfc21e520bba975410e9419450f4b90a2ac8236b9a80fd8130e87d098af/rignore-0.7.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f2e027a6da21a7c8c0d87553c24ca5cc4364def18d146057862c23a96546238e", size = 1118036, upload-time = "2025-11-05T21:40:49.646Z" }, + { url 
= "https://files.pythonhosted.org/packages/e2/25/d37215e4562cda5c13312636393aea0bafe38d54d4e0517520a4cc0753ec/rignore-0.7.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee4a18b82cbbc648e4aac1510066682fe62beb5dc88e2c67c53a83954e541360", size = 1127550, upload-time = "2025-11-05T21:41:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/dc/76/a264ab38bfa1620ec12a8ff1c07778da89e16d8c0f3450b0333020d3d6dc/rignore-0.7.6-cp313-cp313-win32.whl", hash = "sha256:a7d7148b6e5e95035d4390396895adc384d37ff4e06781a36fe573bba7c283e5", size = 646097, upload-time = "2025-11-05T21:41:53.201Z" }, + { url = "https://files.pythonhosted.org/packages/62/44/3c31b8983c29ea8832b6082ddb1d07b90379c2d993bd20fce4487b71b4f4/rignore-0.7.6-cp313-cp313-win_amd64.whl", hash = "sha256:b037c4b15a64dced08fc12310ee844ec2284c4c5c1ca77bc37d0a04f7bff386e", size = 726170, upload-time = "2025-11-05T21:41:38.131Z" }, + { url = "https://files.pythonhosted.org/packages/aa/41/e26a075cab83debe41a42661262f606166157df84e0e02e2d904d134c0d8/rignore-0.7.6-cp313-cp313-win_arm64.whl", hash = "sha256:e47443de9b12fe569889bdbe020abe0e0b667516ee2ab435443f6d0869bd2804", size = 656184, upload-time = "2025-11-05T21:41:27.396Z" }, + { url = "https://files.pythonhosted.org/packages/9a/b9/1f5bd82b87e5550cd843ceb3768b4a8ef274eb63f29333cf2f29644b3d75/rignore-0.7.6-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:8e41be9fa8f2f47239ded8920cc283699a052ac4c371f77f5ac017ebeed75732", size = 882632, upload-time = "2025-11-05T20:42:44.063Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6b/07714a3efe4a8048864e8a5b7db311ba51b921e15268b17defaebf56d3db/rignore-0.7.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6dc1e171e52cefa6c20e60c05394a71165663b48bca6c7666dee4f778f2a7d90", size = 820760, upload-time = "2025-11-05T20:42:27.885Z" }, + { url = "https://files.pythonhosted.org/packages/ac/0f/348c829ea2d8d596e856371b14b9092f8a5dfbb62674ec9b3f67e4939a9d/rignore-0.7.6-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ce2268837c3600f82ab8db58f5834009dc638ee17103582960da668963bebc5", size = 899044, upload-time = "2025-11-05T20:40:55.336Z" }, + { url = "https://files.pythonhosted.org/packages/f0/30/2e1841a19b4dd23878d73edd5d82e998a83d5ed9570a89675f140ca8b2ad/rignore-0.7.6-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:690a3e1b54bfe77e89c4bacb13f046e642f8baadafc61d68f5a726f324a76ab6", size = 874144, upload-time = "2025-11-05T20:41:10.195Z" }, + { url = "https://files.pythonhosted.org/packages/c2/bf/0ce9beb2e5f64c30e3580bef09f5829236889f01511a125f98b83169b993/rignore-0.7.6-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09d12ac7a0b6210c07bcd145007117ebd8abe99c8eeb383e9e4673910c2754b2", size = 1168062, upload-time = "2025-11-05T20:41:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/b9/8b/571c178414eb4014969865317da8a02ce4cf5241a41676ef91a59aab24de/rignore-0.7.6-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a2b2b74a8c60203b08452479b90e5ce3dbe96a916214bc9eb2e5af0b6a9beb0", size = 942542, upload-time = "2025-11-05T20:41:41.838Z" }, + { url = "https://files.pythonhosted.org/packages/19/62/7a3cf601d5a45137a7e2b89d10c05b5b86499190c4b7ca5c3c47d79ee519/rignore-0.7.6-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fc5a531ef02131e44359419a366bfac57f773ea58f5278c2cdd915f7d10ea94", size = 958739, upload-time = "2025-11-05T20:42:12.463Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/1f/4261f6a0d7caf2058a5cde2f5045f565ab91aa7badc972b57d19ce58b14e/rignore-0.7.6-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7a1f77d9c4cd7e76229e252614d963442686bfe12c787a49f4fe481df49e7a9", size = 984138, upload-time = "2025-11-05T20:41:56.775Z" }, + { url = "https://files.pythonhosted.org/packages/2b/bf/628dfe19c75e8ce1f45f7c248f5148b17dfa89a817f8e3552ab74c3ae812/rignore-0.7.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ead81f728682ba72b5b1c3d5846b011d3e0174da978de87c61645f2ed36659a7", size = 1079299, upload-time = "2025-11-05T21:40:16.639Z" }, + { url = "https://files.pythonhosted.org/packages/af/a5/be29c50f5c0c25c637ed32db8758fdf5b901a99e08b608971cda8afb293b/rignore-0.7.6-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:12ffd50f520c22ffdabed8cd8bfb567d9ac165b2b854d3e679f4bcaef11a9441", size = 1139618, upload-time = "2025-11-05T21:40:34.507Z" }, + { url = "https://files.pythonhosted.org/packages/2a/40/3c46cd7ce4fa05c20b525fd60f599165e820af66e66f2c371cd50644558f/rignore-0.7.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:e5a16890fbe3c894f8ca34b0fcacc2c200398d4d46ae654e03bc9b3dbf2a0a72", size = 1117626, upload-time = "2025-11-05T21:40:51.494Z" }, + { url = "https://files.pythonhosted.org/packages/8c/b9/aea926f263b8a29a23c75c2e0d8447965eb1879d3feb53cfcf84db67ed58/rignore-0.7.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3abab3bf99e8a77488ef6c7c9a799fac22224c28fe9f25cc21aa7cc2b72bfc0b", size = 1128144, upload-time = "2025-11-05T21:41:09.169Z" }, + { url = "https://files.pythonhosted.org/packages/a4/f6/0d6242f8d0df7f2ecbe91679fefc1f75e7cd2072cb4f497abaab3f0f8523/rignore-0.7.6-cp314-cp314-win32.whl", hash = "sha256:eeef421c1782953c4375aa32f06ecae470c1285c6381eee2a30d2e02a5633001", size = 646385, upload-time = "2025-11-05T21:41:55.105Z" }, + { url = "https://files.pythonhosted.org/packages/d5/38/c0dcd7b10064f084343d6af26fe9414e46e9619c5f3224b5272e8e5d9956/rignore-0.7.6-cp314-cp314-win_amd64.whl", hash = "sha256:6aeed503b3b3d5af939b21d72a82521701a4bd3b89cd761da1e7dc78621af304", size = 725738, upload-time = "2025-11-05T21:41:39.736Z" }, + { url = "https://files.pythonhosted.org/packages/d9/7a/290f868296c1ece914d565757ab363b04730a728b544beb567ceb3b2d96f/rignore-0.7.6-cp314-cp314-win_arm64.whl", hash = "sha256:104f215b60b3c984c386c3e747d6ab4376d5656478694e22c7bd2f788ddd8304", size = 656008, upload-time = "2025-11-05T21:41:29.028Z" }, + { url = "https://files.pythonhosted.org/packages/ca/d2/3c74e3cd81fe8ea08a8dcd2d755c09ac2e8ad8fe409508904557b58383d3/rignore-0.7.6-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:bb24a5b947656dd94cb9e41c4bc8b23cec0c435b58be0d74a874f63c259549e8", size = 882835, upload-time = "2025-11-05T20:42:45.443Z" }, + { url = "https://files.pythonhosted.org/packages/77/61/a772a34b6b63154877433ac2d048364815b24c2dd308f76b212c408101a2/rignore-0.7.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5b1e33c9501cefe24b70a1eafd9821acfd0ebf0b35c3a379430a14df089993e3", size = 820301, upload-time = "2025-11-05T20:42:29.226Z" }, + { url = "https://files.pythonhosted.org/packages/71/30/054880b09c0b1b61d17eeb15279d8bf729c0ba52b36c3ada52fb827cbb3c/rignore-0.7.6-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bec3994665a44454df86deb762061e05cd4b61e3772f5b07d1882a8a0d2748d5", size = 897611, upload-time = "2025-11-05T20:40:56.475Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/40/b2d1c169f833d69931bf232600eaa3c7998ba4f9a402e43a822dad2ea9f2/rignore-0.7.6-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26cba2edfe3cff1dfa72bddf65d316ddebf182f011f2f61538705d6dbaf54986", size = 873875, upload-time = "2025-11-05T20:41:11.561Z" }, + { url = "https://files.pythonhosted.org/packages/55/59/ca5ae93d83a1a60e44b21d87deb48b177a8db1b85e82fc8a9abb24a8986d/rignore-0.7.6-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ffa86694fec604c613696cb91e43892aa22e1fec5f9870e48f111c603e5ec4e9", size = 1167245, upload-time = "2025-11-05T20:41:28.29Z" }, + { url = "https://files.pythonhosted.org/packages/a5/52/cf3dce392ba2af806cba265aad6bcd9c48bb2a6cb5eee448d3319f6e505b/rignore-0.7.6-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48efe2ed95aa8104145004afb15cdfa02bea5cdde8b0344afeb0434f0d989aa2", size = 941750, upload-time = "2025-11-05T20:41:43.111Z" }, + { url = "https://files.pythonhosted.org/packages/ec/be/3f344c6218d779395e785091d05396dfd8b625f6aafbe502746fcd880af2/rignore-0.7.6-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dcae43eb44b7f2457fef7cc87f103f9a0013017a6f4e62182c565e924948f21", size = 958896, upload-time = "2025-11-05T20:42:13.784Z" }, + { url = "https://files.pythonhosted.org/packages/c9/34/d3fa71938aed7d00dcad87f0f9bcb02ad66c85d6ffc83ba31078ce53646a/rignore-0.7.6-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2cd649a7091c0dad2f11ef65630d30c698d505cbe8660dd395268e7c099cc99f", size = 983992, upload-time = "2025-11-05T20:41:58.022Z" }, + { url = "https://files.pythonhosted.org/packages/24/a4/52a697158e9920705bdbd0748d59fa63e0f3233fb92e9df9a71afbead6ca/rignore-0.7.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42de84b0289d478d30ceb7ae59023f7b0527786a9a5b490830e080f0e4ea5aeb", size = 1078181, upload-time = "2025-11-05T21:40:18.151Z" }, + { url = "https://files.pythonhosted.org/packages/ac/65/aa76dbcdabf3787a6f0fd61b5cc8ed1e88580590556d6c0207960d2384bb/rignore-0.7.6-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:875a617e57b53b4acbc5a91de418233849711c02e29cc1f4f9febb2f928af013", size = 1139232, upload-time = "2025-11-05T21:40:35.966Z" }, + { url = "https://files.pythonhosted.org/packages/08/44/31b31a49b3233c6842acc1c0731aa1e7fb322a7170612acf30327f700b44/rignore-0.7.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8703998902771e96e49968105207719f22926e4431b108450f3f430b4e268b7c", size = 1117349, upload-time = "2025-11-05T21:40:53.013Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ae/1b199a2302c19c658cf74e5ee1427605234e8c91787cfba0015f2ace145b/rignore-0.7.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:602ef33f3e1b04c1e9a10a3c03f8bc3cef2d2383dcc250d309be42b49923cabc", size = 1127702, upload-time = "2025-11-05T21:41:10.881Z" }, + { url = "https://files.pythonhosted.org/packages/fc/d3/18210222b37e87e36357f7b300b7d98c6dd62b133771e71ae27acba83a4f/rignore-0.7.6-cp314-cp314t-win32.whl", hash = "sha256:c1d8f117f7da0a4a96a8daef3da75bc090e3792d30b8b12cfadc240c631353f9", size = 647033, upload-time = "2025-11-05T21:42:00.095Z" }, + { url = "https://files.pythonhosted.org/packages/3e/87/033eebfbee3ec7d92b3bb1717d8f68c88e6fc7de54537040f3b3a405726f/rignore-0.7.6-cp314-cp314t-win_amd64.whl", hash = "sha256:ca36e59408bec81de75d307c568c2d0d410fb880b1769be43611472c61e85c96", size = 725647, upload-time = "2025-11-05T21:41:44.449Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/62/b88e5879512c55b8ee979c666ee6902adc4ed05007226de266410ae27965/rignore-0.7.6-cp314-cp314t-win_arm64.whl", hash = "sha256:b83adabeb3e8cf662cabe1931b83e165b88c526fa6af6b3aa90429686e474896", size = 656035, upload-time = "2025-11-05T21:41:31.13Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/a6250ff0c49a3cdb943910ada4116e708118e9b901c878cfae616c80a904/rignore-0.7.6-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a20b6fb61bcced9a83dfcca6599ad45182b06ba720cff7c8d891e5b78db5b65f", size = 886470, upload-time = "2025-11-05T20:42:52.314Z" }, + { url = "https://files.pythonhosted.org/packages/35/af/c69c0c51b8f9f7914d95c4ea91c29a2ac067572048cae95dd6d2efdbe05d/rignore-0.7.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:392dcabfecbe176c9ebbcb40d85a5e86a5989559c4f988c2741da7daf1b5be25", size = 825976, upload-time = "2025-11-05T20:42:35.118Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d2/1b264f56132264ea609d3213ab603d6a27016b19559a1a1ede1a66a03dcd/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22baa462abdc36fdd5a5e2dae423107723351b85ff093762f9261148b9d0a04a", size = 899739, upload-time = "2025-11-05T20:41:01.518Z" }, + { url = "https://files.pythonhosted.org/packages/55/e4/b3c5dfdd8d8a10741dfe7199ef45d19a0e42d0c13aa377c83bd6caf65d90/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53fb28882d2538cb2d231972146c4927a9d9455e62b209f85d634408c4103538", size = 874843, upload-time = "2025-11-05T20:41:17.687Z" }, + { url = "https://files.pythonhosted.org/packages/cc/10/d6f3750233881a2a154cefc9a6a0a9b19da526b19f7f08221b552c6f827d/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87409f7eeb1103d6b77f3472a3a0d9a5953e3ae804a55080bdcb0120ee43995b", size = 1170348, upload-time = "2025-11-05T20:41:34.21Z" }, + { url = "https://files.pythonhosted.org/packages/6e/10/ad98ca05c9771c15af734cee18114a3c280914b6e34fde9ffea2e61e88aa/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:684014e42e4341ab3ea23a203551857fcc03a7f8ae96ca3aefb824663f55db32", size = 942315, upload-time = "2025-11-05T20:41:48.508Z" }, + { url = "https://files.pythonhosted.org/packages/de/00/ab5c0f872acb60d534e687e629c17e0896c62da9b389c66d3aa16b817aa8/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77356ebb01ba13f8a425c3d30fcad40e57719c0e37670d022d560884a30e4767", size = 961047, upload-time = "2025-11-05T20:42:19.403Z" }, + { url = "https://files.pythonhosted.org/packages/b8/86/3030fdc363a8f0d1cd155b4c453d6db9bab47a24fcc64d03f61d9d78fe6a/rignore-0.7.6-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6cbd8a48abbd3747a6c830393cd578782fab5d43f4deea48c5f5e344b8fed2b0", size = 986090, upload-time = "2025-11-05T20:42:03.581Z" }, + { url = "https://files.pythonhosted.org/packages/33/b8/133aa4002cee0ebbb39362f94e4898eec7fbd09cec9fcbce1cd65b355b7f/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2673225dcec7f90497e79438c35e34638d0d0391ccea3cbb79bfb9adc0dc5bd7", size = 1079656, upload-time = "2025-11-05T21:40:24.89Z" }, + { url = "https://files.pythonhosted.org/packages/67/56/36d5d34210e5e7dfcd134eed8335b19e80ae940ee758f493e4f2b344dd70/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:c081f17290d8a2b96052b79207622aa635686ea39d502b976836384ede3d303c", size = 
1139789, upload-time = "2025-11-05T21:40:42.119Z" }, + { url = "https://files.pythonhosted.org/packages/6b/5b/bb4f9420802bf73678033a4a55ab1bede36ce2e9b41fec5f966d83d932b3/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:57e8327aacc27f921968cb2a174f9e47b084ce9a7dd0122c8132d22358f6bd79", size = 1120308, upload-time = "2025-11-05T21:40:59.402Z" }, + { url = "https://files.pythonhosted.org/packages/ce/8b/a1299085b28a2f6135e30370b126e3c5055b61908622f2488ade67641479/rignore-0.7.6-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d8955b57e42f2a5434670d5aa7b75eaf6e74602ccd8955dddf7045379cd762fb", size = 1129444, upload-time = "2025-11-05T21:41:17.906Z" }, +] + +[[package]] +name = "rsa" +version = "4.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, +] + +[[package]] +name = "s3transfer" +version = "0.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/05/d52bf1e65044b4e5e27d4e63e8d1579dbdec54fce685908ae09bc3720030/s3transfer-0.13.1.tar.gz", hash = "sha256:c3fdba22ba1bd367922f27ec8032d6a1cf5f10c934fb5d68cf60fd5a23d936cf", size = 150589, upload-time = "2025-07-18T19:22:42.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/4f/d073e09df851cfa251ef7840007d04db3293a0482ce607d2b993926089be/s3transfer-0.13.1-py3-none-any.whl", hash = "sha256:a981aa7429be23fe6dfc13e80e4020057cbab622b08c0315288758d67cabc724", size = 85308, upload-time = "2025-07-18T19:22:40.947Z" }, +] + +[[package]] +name = "safetensors" +version = "0.6.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/cc/738f3011628920e027a11754d9cae9abec1aed00f7ae860abbf843755233/safetensors-0.6.2.tar.gz", hash = "sha256:43ff2aa0e6fa2dc3ea5524ac7ad93a9839256b8703761e76e2d0b2a3fa4f15d9", size = 197968, upload-time = "2025-08-08T13:13:58.654Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/b1/3f5fd73c039fc87dba3ff8b5d528bfc5a32b597fea8e7a6a4800343a17c7/safetensors-0.6.2-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:9c85ede8ec58f120bad982ec47746981e210492a6db876882aa021446af8ffba", size = 454797, upload-time = "2025-08-08T13:13:52.066Z" }, + { url = "https://files.pythonhosted.org/packages/8c/c9/bb114c158540ee17907ec470d01980957fdaf87b4aa07914c24eba87b9c6/safetensors-0.6.2-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d6675cf4b39c98dbd7d940598028f3742e0375a6b4d4277e76beb0c35f4b843b", size = 432206, upload-time = "2025-08-08T13:13:50.931Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8e/f70c34e47df3110e8e0bb268d90db8d4be8958a54ab0336c9be4fe86dac8/safetensors-0.6.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d2d2b3ce1e2509c68932ca03ab8f20570920cd9754b05063d4368ee52833ecd", size = 473261, upload-time = "2025-08-08T13:13:41.259Z" }, + { 
url = "https://files.pythonhosted.org/packages/2a/f5/be9c6a7c7ef773e1996dc214e73485286df1836dbd063e8085ee1976f9cb/safetensors-0.6.2-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:93de35a18f46b0f5a6a1f9e26d91b442094f2df02e9fd7acf224cfec4238821a", size = 485117, upload-time = "2025-08-08T13:13:43.506Z" }, + { url = "https://files.pythonhosted.org/packages/c9/55/23f2d0a2c96ed8665bf17a30ab4ce5270413f4d74b6d87dd663258b9af31/safetensors-0.6.2-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89a89b505f335640f9120fac65ddeb83e40f1fd081cb8ed88b505bdccec8d0a1", size = 616154, upload-time = "2025-08-08T13:13:45.096Z" }, + { url = "https://files.pythonhosted.org/packages/98/c6/affb0bd9ce02aa46e7acddbe087912a04d953d7a4d74b708c91b5806ef3f/safetensors-0.6.2-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4d0d0b937e04bdf2ae6f70cd3ad51328635fe0e6214aa1fc811f3b576b3bda", size = 520713, upload-time = "2025-08-08T13:13:46.25Z" }, + { url = "https://files.pythonhosted.org/packages/fe/5d/5a514d7b88e310c8b146e2404e0dc161282e78634d9358975fd56dfd14be/safetensors-0.6.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8045db2c872db8f4cbe3faa0495932d89c38c899c603f21e9b6486951a5ecb8f", size = 485835, upload-time = "2025-08-08T13:13:49.373Z" }, + { url = "https://files.pythonhosted.org/packages/7a/7b/4fc3b2ba62c352b2071bea9cfbad330fadda70579f617506ae1a2f129cab/safetensors-0.6.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:81e67e8bab9878bb568cffbc5f5e655adb38d2418351dc0859ccac158f753e19", size = 521503, upload-time = "2025-08-08T13:13:47.651Z" }, + { url = "https://files.pythonhosted.org/packages/5a/50/0057e11fe1f3cead9254315a6c106a16dd4b1a19cd247f7cc6414f6b7866/safetensors-0.6.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0e4d029ab0a0e0e4fdf142b194514695b1d7d3735503ba700cf36d0fc7136ce", size = 652256, upload-time = "2025-08-08T13:13:53.167Z" }, + { url = "https://files.pythonhosted.org/packages/e9/29/473f789e4ac242593ac1656fbece6e1ecd860bb289e635e963667807afe3/safetensors-0.6.2-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:fa48268185c52bfe8771e46325a1e21d317207bcabcb72e65c6e28e9ffeb29c7", size = 747281, upload-time = "2025-08-08T13:13:54.656Z" }, + { url = "https://files.pythonhosted.org/packages/68/52/f7324aad7f2df99e05525c84d352dc217e0fa637a4f603e9f2eedfbe2c67/safetensors-0.6.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:d83c20c12c2d2f465997c51b7ecb00e407e5f94d7dec3ea0cc11d86f60d3fde5", size = 692286, upload-time = "2025-08-08T13:13:55.884Z" }, + { url = "https://files.pythonhosted.org/packages/ad/fe/cad1d9762868c7c5dc70c8620074df28ebb1a8e4c17d4c0cb031889c457e/safetensors-0.6.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d944cea65fad0ead848b6ec2c37cc0b197194bec228f8020054742190e9312ac", size = 655957, upload-time = "2025-08-08T13:13:57.029Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/e2158e17bbe57d104f0abbd95dff60dda916cf277c9f9663b4bf9bad8b6e/safetensors-0.6.2-cp38-abi3-win32.whl", hash = "sha256:cab75ca7c064d3911411461151cb69380c9225798a20e712b102edda2542ddb1", size = 308926, upload-time = "2025-08-08T13:14:01.095Z" }, + { url = "https://files.pythonhosted.org/packages/2c/c3/c0be1135726618dc1e28d181b8c442403d8dbb9e273fd791de2d4384bcdd/safetensors-0.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:c7b214870df923cbc1593c3faee16bec59ea462758699bd3fee399d00aac072c", size = 320192, upload-time = "2025-08-08T13:13:59.467Z" }, +] + +[[package]] +name = 
"scikit-learn" +version = "1.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "joblib" }, + { name = "numpy" }, + { name = "scipy" }, + { name = "threadpoolctl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/c2/a7855e41c9d285dfe86dc50b250978105dce513d6e459ea66a6aeb0e1e0c/scikit_learn-1.7.2.tar.gz", hash = "sha256:20e9e49ecd130598f1ca38a1d85090e1a600147b9c02fa6f15d69cb53d968fda", size = 7193136, upload-time = "2025-09-09T08:21:29.075Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/83/564e141eef908a5863a54da8ca342a137f45a0bfb71d1d79704c9894c9d1/scikit_learn-1.7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7509693451651cd7361d30ce4e86a1347493554f172b1c72a39300fa2aea79e", size = 9331967, upload-time = "2025-09-09T08:20:32.421Z" }, + { url = "https://files.pythonhosted.org/packages/18/d6/ba863a4171ac9d7314c4d3fc251f015704a2caeee41ced89f321c049ed83/scikit_learn-1.7.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:0486c8f827c2e7b64837c731c8feff72c0bd2b998067a8a9cbc10643c31f0fe1", size = 8648645, upload-time = "2025-09-09T08:20:34.436Z" }, + { url = "https://files.pythonhosted.org/packages/ef/0e/97dbca66347b8cf0ea8b529e6bb9367e337ba2e8be0ef5c1a545232abfde/scikit_learn-1.7.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:89877e19a80c7b11a2891a27c21c4894fb18e2c2e077815bcade10d34287b20d", size = 9715424, upload-time = "2025-09-09T08:20:36.776Z" }, + { url = "https://files.pythonhosted.org/packages/f7/32/1f3b22e3207e1d2c883a7e09abb956362e7d1bd2f14458c7de258a26ac15/scikit_learn-1.7.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8da8bf89d4d79aaec192d2bda62f9b56ae4e5b4ef93b6a56b5de4977e375c1f1", size = 9509234, upload-time = "2025-09-09T08:20:38.957Z" }, + { url = "https://files.pythonhosted.org/packages/9f/71/34ddbd21f1da67c7a768146968b4d0220ee6831e4bcbad3e03dd3eae88b6/scikit_learn-1.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:9b7ed8d58725030568523e937c43e56bc01cadb478fc43c042a9aca1dacb3ba1", size = 8894244, upload-time = "2025-09-09T08:20:41.166Z" }, + { url = "https://files.pythonhosted.org/packages/a7/aa/3996e2196075689afb9fce0410ebdb4a09099d7964d061d7213700204409/scikit_learn-1.7.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8d91a97fa2b706943822398ab943cde71858a50245e31bc71dba62aab1d60a96", size = 9259818, upload-time = "2025-09-09T08:20:43.19Z" }, + { url = "https://files.pythonhosted.org/packages/43/5d/779320063e88af9c4a7c2cf463ff11c21ac9c8bd730c4a294b0000b666c9/scikit_learn-1.7.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:acbc0f5fd2edd3432a22c69bed78e837c70cf896cd7993d71d51ba6708507476", size = 8636997, upload-time = "2025-09-09T08:20:45.468Z" }, + { url = "https://files.pythonhosted.org/packages/5c/d0/0c577d9325b05594fdd33aa970bf53fb673f051a45496842caee13cfd7fe/scikit_learn-1.7.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e5bf3d930aee75a65478df91ac1225ff89cd28e9ac7bd1196853a9229b6adb0b", size = 9478381, upload-time = "2025-09-09T08:20:47.982Z" }, + { url = "https://files.pythonhosted.org/packages/82/70/8bf44b933837ba8494ca0fc9a9ab60f1c13b062ad0197f60a56e2fc4c43e/scikit_learn-1.7.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4d6e9deed1a47aca9fe2f267ab8e8fe82ee20b4526b2c0cd9e135cea10feb44", size = 9300296, upload-time = "2025-09-09T08:20:50.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/99/ed35197a158f1fdc2fe7c3680e9c70d0128f662e1fee4ed495f4b5e13db0/scikit_learn-1.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:6088aa475f0785e01bcf8529f55280a3d7d298679f50c0bb70a2364a82d0b290", size = 8731256, upload-time = "2025-09-09T08:20:52.627Z" }, + { url = "https://files.pythonhosted.org/packages/ae/93/a3038cb0293037fd335f77f31fe053b89c72f17b1c8908c576c29d953e84/scikit_learn-1.7.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b7dacaa05e5d76759fb071558a8b5130f4845166d88654a0f9bdf3eb57851b7", size = 9212382, upload-time = "2025-09-09T08:20:54.731Z" }, + { url = "https://files.pythonhosted.org/packages/40/dd/9a88879b0c1104259136146e4742026b52df8540c39fec21a6383f8292c7/scikit_learn-1.7.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:abebbd61ad9e1deed54cca45caea8ad5f79e1b93173dece40bb8e0c658dbe6fe", size = 8592042, upload-time = "2025-09-09T08:20:57.313Z" }, + { url = "https://files.pythonhosted.org/packages/46/af/c5e286471b7d10871b811b72ae794ac5fe2989c0a2df07f0ec723030f5f5/scikit_learn-1.7.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:502c18e39849c0ea1a5d681af1dbcf15f6cce601aebb657aabbfe84133c1907f", size = 9434180, upload-time = "2025-09-09T08:20:59.671Z" }, + { url = "https://files.pythonhosted.org/packages/f1/fd/df59faa53312d585023b2da27e866524ffb8faf87a68516c23896c718320/scikit_learn-1.7.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a4c328a71785382fe3fe676a9ecf2c86189249beff90bf85e22bdb7efaf9ae0", size = 9283660, upload-time = "2025-09-09T08:21:01.71Z" }, + { url = "https://files.pythonhosted.org/packages/a7/c7/03000262759d7b6f38c836ff9d512f438a70d8a8ddae68ee80de72dcfb63/scikit_learn-1.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:63a9afd6f7b229aad94618c01c252ce9e6fa97918c5ca19c9a17a087d819440c", size = 8702057, upload-time = "2025-09-09T08:21:04.234Z" }, + { url = "https://files.pythonhosted.org/packages/55/87/ef5eb1f267084532c8e4aef98a28b6ffe7425acbfd64b5e2f2e066bc29b3/scikit_learn-1.7.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9acb6c5e867447b4e1390930e3944a005e2cb115922e693c08a323421a6966e8", size = 9558731, upload-time = "2025-09-09T08:21:06.381Z" }, + { url = "https://files.pythonhosted.org/packages/93/f8/6c1e3fc14b10118068d7938878a9f3f4e6d7b74a8ddb1e5bed65159ccda8/scikit_learn-1.7.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:2a41e2a0ef45063e654152ec9d8bcfc39f7afce35b08902bfe290c2498a67a6a", size = 9038852, upload-time = "2025-09-09T08:21:08.628Z" }, + { url = "https://files.pythonhosted.org/packages/83/87/066cafc896ee540c34becf95d30375fe5cbe93c3b75a0ee9aa852cd60021/scikit_learn-1.7.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98335fb98509b73385b3ab2bd0639b1f610541d3988ee675c670371d6a87aa7c", size = 9527094, upload-time = "2025-09-09T08:21:11.486Z" }, + { url = "https://files.pythonhosted.org/packages/9c/2b/4903e1ccafa1f6453b1ab78413938c8800633988c838aa0be386cbb33072/scikit_learn-1.7.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:191e5550980d45449126e23ed1d5e9e24b2c68329ee1f691a3987476e115e09c", size = 9367436, upload-time = "2025-09-09T08:21:13.602Z" }, + { url = "https://files.pythonhosted.org/packages/b5/aa/8444be3cfb10451617ff9d177b3c190288f4563e6c50ff02728be67ad094/scikit_learn-1.7.2-cp313-cp313t-win_amd64.whl", hash = "sha256:57dc4deb1d3762c75d685507fbd0bc17160144b2f2ba4ccea5dc285ab0d0e973", size = 9275749, upload-time = "2025-09-09T08:21:15.96Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/82/dee5acf66837852e8e68df6d8d3a6cb22d3df997b733b032f513d95205b7/scikit_learn-1.7.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fa8f63940e29c82d1e67a45d5297bdebbcb585f5a5a50c4914cc2e852ab77f33", size = 9208906, upload-time = "2025-09-09T08:21:18.557Z" }, + { url = "https://files.pythonhosted.org/packages/3c/30/9029e54e17b87cb7d50d51a5926429c683d5b4c1732f0507a6c3bed9bf65/scikit_learn-1.7.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:f95dc55b7902b91331fa4e5845dd5bde0580c9cd9612b1b2791b7e80c3d32615", size = 8627836, upload-time = "2025-09-09T08:21:20.695Z" }, + { url = "https://files.pythonhosted.org/packages/60/18/4a52c635c71b536879f4b971c2cedf32c35ee78f48367885ed8025d1f7ee/scikit_learn-1.7.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9656e4a53e54578ad10a434dc1f993330568cfee176dff07112b8785fb413106", size = 9426236, upload-time = "2025-09-09T08:21:22.645Z" }, + { url = "https://files.pythonhosted.org/packages/99/7e/290362f6ab582128c53445458a5befd471ed1ea37953d5bcf80604619250/scikit_learn-1.7.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96dc05a854add0e50d3f47a1ef21a10a595016da5b007c7d9cd9d0bffd1fcc61", size = 9312593, upload-time = "2025-09-09T08:21:24.65Z" }, + { url = "https://files.pythonhosted.org/packages/8e/87/24f541b6d62b1794939ae6422f8023703bbf6900378b2b34e0b4384dfefd/scikit_learn-1.7.2-cp314-cp314-win_amd64.whl", hash = "sha256:bb24510ed3f9f61476181e4db51ce801e2ba37541def12dc9333b946fc7a9cf8", size = 8820007, upload-time = "2025-09-09T08:21:26.713Z" }, +] + +[[package]] +name = "scipy" +version = "1.16.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4c/3b/546a6f0bfe791bbb7f8d591613454d15097e53f906308ec6f7c1ce588e8e/scipy-1.16.2.tar.gz", hash = "sha256:af029b153d243a80afb6eabe40b0a07f8e35c9adc269c019f364ad747f826a6b", size = 30580599, upload-time = "2025-09-11T17:48:08.271Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/ef/37ed4b213d64b48422df92560af7300e10fe30b5d665dd79932baebee0c6/scipy-1.16.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:6ab88ea43a57da1af33292ebd04b417e8e2eaf9d5aa05700be8d6e1b6501cd92", size = 36619956, upload-time = "2025-09-11T17:39:20.5Z" }, + { url = "https://files.pythonhosted.org/packages/85/ab/5c2eba89b9416961a982346a4d6a647d78c91ec96ab94ed522b3b6baf444/scipy-1.16.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c95e96c7305c96ede73a7389f46ccd6c659c4da5ef1b2789466baeaed3622b6e", size = 28931117, upload-time = "2025-09-11T17:39:29.06Z" }, + { url = "https://files.pythonhosted.org/packages/80/d1/eed51ab64d227fe60229a2d57fb60ca5898cfa50ba27d4f573e9e5f0b430/scipy-1.16.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:87eb178db04ece7c698220d523c170125dbffebb7af0345e66c3554f6f60c173", size = 20921997, upload-time = "2025-09-11T17:39:34.892Z" }, + { url = "https://files.pythonhosted.org/packages/be/7c/33ea3e23bbadde96726edba6bf9111fb1969d14d9d477ffa202c67bec9da/scipy-1.16.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:4e409eac067dcee96a57fbcf424c13f428037827ec7ee3cb671ff525ca4fc34d", size = 23523374, upload-time = "2025-09-11T17:39:40.846Z" }, + { url = "https://files.pythonhosted.org/packages/96/0b/7399dc96e1e3f9a05e258c98d716196a34f528eef2ec55aad651ed136d03/scipy-1.16.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:e574be127bb760f0dad24ff6e217c80213d153058372362ccb9555a10fc5e8d2", size = 33583702, upload-time = "2025-09-11T17:39:49.011Z" }, + { url = "https://files.pythonhosted.org/packages/1a/bc/a5c75095089b96ea72c1bd37a4497c24b581ec73db4ef58ebee142ad2d14/scipy-1.16.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f5db5ba6188d698ba7abab982ad6973265b74bb40a1efe1821b58c87f73892b9", size = 35883427, upload-time = "2025-09-11T17:39:57.406Z" }, + { url = "https://files.pythonhosted.org/packages/ab/66/e25705ca3d2b87b97fe0a278a24b7f477b4023a926847935a1a71488a6a6/scipy-1.16.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec6e74c4e884104ae006d34110677bfe0098203a3fec2f3faf349f4cb05165e3", size = 36212940, upload-time = "2025-09-11T17:40:06.013Z" }, + { url = "https://files.pythonhosted.org/packages/d6/fd/0bb911585e12f3abdd603d721d83fc1c7492835e1401a0e6d498d7822b4b/scipy-1.16.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:912f46667d2d3834bc3d57361f854226475f695eb08c08a904aadb1c936b6a88", size = 38865092, upload-time = "2025-09-11T17:40:15.143Z" }, + { url = "https://files.pythonhosted.org/packages/d6/73/c449a7d56ba6e6f874183759f8483cde21f900a8be117d67ffbb670c2958/scipy-1.16.2-cp311-cp311-win_amd64.whl", hash = "sha256:91e9e8a37befa5a69e9cacbe0bcb79ae5afb4a0b130fd6db6ee6cc0d491695fa", size = 38687626, upload-time = "2025-09-11T17:40:24.041Z" }, + { url = "https://files.pythonhosted.org/packages/68/72/02f37316adf95307f5d9e579023c6899f89ff3a051fa079dbd6faafc48e5/scipy-1.16.2-cp311-cp311-win_arm64.whl", hash = "sha256:f3bf75a6dcecab62afde4d1f973f1692be013110cad5338007927db8da73249c", size = 25503506, upload-time = "2025-09-11T17:40:30.703Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8d/6396e00db1282279a4ddd507c5f5e11f606812b608ee58517ce8abbf883f/scipy-1.16.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:89d6c100fa5c48472047632e06f0876b3c4931aac1f4291afc81a3644316bb0d", size = 36646259, upload-time = "2025-09-11T17:40:39.329Z" }, + { url = "https://files.pythonhosted.org/packages/3b/93/ea9edd7e193fceb8eef149804491890bde73fb169c896b61aa3e2d1e4e77/scipy-1.16.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ca748936cd579d3f01928b30a17dc474550b01272d8046e3e1ee593f23620371", size = 28888976, upload-time = "2025-09-11T17:40:46.82Z" }, + { url = "https://files.pythonhosted.org/packages/91/4d/281fddc3d80fd738ba86fd3aed9202331180b01e2c78eaae0642f22f7e83/scipy-1.16.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:fac4f8ce2ddb40e2e3d0f7ec36d2a1e7f92559a2471e59aec37bd8d9de01fec0", size = 20879905, upload-time = "2025-09-11T17:40:52.545Z" }, + { url = "https://files.pythonhosted.org/packages/69/40/b33b74c84606fd301b2915f0062e45733c6ff5708d121dd0deaa8871e2d0/scipy-1.16.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:033570f1dcefd79547a88e18bccacff025c8c647a330381064f561d43b821232", size = 23553066, upload-time = "2025-09-11T17:40:59.014Z" }, + { url = "https://files.pythonhosted.org/packages/55/a7/22c739e2f21a42cc8f16bc76b47cff4ed54fbe0962832c589591c2abec34/scipy-1.16.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ea3421209bf00c8a5ef2227de496601087d8f638a2363ee09af059bd70976dc1", size = 33336407, upload-time = "2025-09-11T17:41:06.796Z" }, + { url = "https://files.pythonhosted.org/packages/53/11/a0160990b82999b45874dc60c0c183d3a3a969a563fffc476d5a9995c407/scipy-1.16.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f66bd07ba6f84cd4a380b41d1bf3c59ea488b590a2ff96744845163309ee8e2f", 
size = 35673281, upload-time = "2025-09-11T17:41:15.055Z" }, + { url = "https://files.pythonhosted.org/packages/96/53/7ef48a4cfcf243c3d0f1643f5887c81f29fdf76911c4e49331828e19fc0a/scipy-1.16.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e9feab931bd2aea4a23388c962df6468af3d808ddf2d40f94a81c5dc38f32ef", size = 36004222, upload-time = "2025-09-11T17:41:23.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7f/71a69e0afd460049d41c65c630c919c537815277dfea214031005f474d78/scipy-1.16.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:03dfc75e52f72cf23ec2ced468645321407faad8f0fe7b1f5b49264adbc29cb1", size = 38664586, upload-time = "2025-09-11T17:41:31.021Z" }, + { url = "https://files.pythonhosted.org/packages/34/95/20e02ca66fb495a95fba0642fd48e0c390d0ece9b9b14c6e931a60a12dea/scipy-1.16.2-cp312-cp312-win_amd64.whl", hash = "sha256:0ce54e07bbb394b417457409a64fd015be623f36e330ac49306433ffe04bc97e", size = 38550641, upload-time = "2025-09-11T17:41:36.61Z" }, + { url = "https://files.pythonhosted.org/packages/92/ad/13646b9beb0a95528ca46d52b7babafbe115017814a611f2065ee4e61d20/scipy-1.16.2-cp312-cp312-win_arm64.whl", hash = "sha256:2a8ffaa4ac0df81a0b94577b18ee079f13fecdb924df3328fc44a7dc5ac46851", size = 25456070, upload-time = "2025-09-11T17:41:41.3Z" }, + { url = "https://files.pythonhosted.org/packages/c1/27/c5b52f1ee81727a9fc457f5ac1e9bf3d6eab311805ea615c83c27ba06400/scipy-1.16.2-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:84f7bf944b43e20b8a894f5fe593976926744f6c185bacfcbdfbb62736b5cc70", size = 36604856, upload-time = "2025-09-11T17:41:47.695Z" }, + { url = "https://files.pythonhosted.org/packages/32/a9/15c20d08e950b540184caa8ced675ba1128accb0e09c653780ba023a4110/scipy-1.16.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:5c39026d12edc826a1ef2ad35ad1e6d7f087f934bb868fc43fa3049c8b8508f9", size = 28864626, upload-time = "2025-09-11T17:41:52.642Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fc/ea36098df653cca26062a627c1a94b0de659e97127c8491e18713ca0e3b9/scipy-1.16.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e52729ffd45b68777c5319560014d6fd251294200625d9d70fd8626516fc49f5", size = 20855689, upload-time = "2025-09-11T17:41:57.886Z" }, + { url = "https://files.pythonhosted.org/packages/dc/6f/d0b53be55727f3e6d7c72687ec18ea6d0047cf95f1f77488b99a2bafaee1/scipy-1.16.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:024dd4a118cccec09ca3209b7e8e614931a6ffb804b2a601839499cb88bdf925", size = 23512151, upload-time = "2025-09-11T17:42:02.303Z" }, + { url = "https://files.pythonhosted.org/packages/11/85/bf7dab56e5c4b1d3d8eef92ca8ede788418ad38a7dc3ff50262f00808760/scipy-1.16.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7a5dc7ee9c33019973a470556081b0fd3c9f4c44019191039f9769183141a4d9", size = 33329824, upload-time = "2025-09-11T17:42:07.549Z" }, + { url = "https://files.pythonhosted.org/packages/da/6a/1a927b14ddc7714111ea51f4e568203b2bb6ed59bdd036d62127c1a360c8/scipy-1.16.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c2275ff105e508942f99d4e3bc56b6ef5e4b3c0af970386ca56b777608ce95b7", size = 35681881, upload-time = "2025-09-11T17:42:13.255Z" }, + { url = "https://files.pythonhosted.org/packages/c1/5f/331148ea5780b4fcc7007a4a6a6ee0a0c1507a796365cc642d4d226e1c3a/scipy-1.16.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:af80196eaa84f033e48444d2e0786ec47d328ba00c71e4299b602235ffef9acb", size = 36006219, upload-time = "2025-09-11T17:42:18.765Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/3a/e991aa9d2aec723b4a8dcfbfc8365edec5d5e5f9f133888067f1cbb7dfc1/scipy-1.16.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9fb1eb735fe3d6ed1f89918224e3385fbf6f9e23757cacc35f9c78d3b712dd6e", size = 38682147, upload-time = "2025-09-11T17:42:25.177Z" }, + { url = "https://files.pythonhosted.org/packages/a1/57/0f38e396ad19e41b4c5db66130167eef8ee620a49bc7d0512e3bb67e0cab/scipy-1.16.2-cp313-cp313-win_amd64.whl", hash = "sha256:fda714cf45ba43c9d3bae8f2585c777f64e3f89a2e073b668b32ede412d8f52c", size = 38520766, upload-time = "2025-09-11T17:43:25.342Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a5/85d3e867b6822d331e26c862a91375bb7746a0b458db5effa093d34cdb89/scipy-1.16.2-cp313-cp313-win_arm64.whl", hash = "sha256:2f5350da923ccfd0b00e07c3e5cfb316c1c0d6c1d864c07a72d092e9f20db104", size = 25451169, upload-time = "2025-09-11T17:43:30.198Z" }, + { url = "https://files.pythonhosted.org/packages/09/d9/60679189bcebda55992d1a45498de6d080dcaf21ce0c8f24f888117e0c2d/scipy-1.16.2-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:53d8d2ee29b925344c13bda64ab51785f016b1b9617849dac10897f0701b20c1", size = 37012682, upload-time = "2025-09-11T17:42:30.677Z" }, + { url = "https://files.pythonhosted.org/packages/83/be/a99d13ee4d3b7887a96f8c71361b9659ba4ef34da0338f14891e102a127f/scipy-1.16.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:9e05e33657efb4c6a9d23bd8300101536abd99c85cca82da0bffff8d8764d08a", size = 29389926, upload-time = "2025-09-11T17:42:35.845Z" }, + { url = "https://files.pythonhosted.org/packages/bf/0a/130164a4881cec6ca8c00faf3b57926f28ed429cd6001a673f83c7c2a579/scipy-1.16.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:7fe65b36036357003b3ef9d37547abeefaa353b237e989c21027b8ed62b12d4f", size = 21381152, upload-time = "2025-09-11T17:42:40.07Z" }, + { url = "https://files.pythonhosted.org/packages/47/a6/503ffb0310ae77fba874e10cddfc4a1280bdcca1d13c3751b8c3c2996cf8/scipy-1.16.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6406d2ac6d40b861cccf57f49592f9779071655e9f75cd4f977fa0bdd09cb2e4", size = 23914410, upload-time = "2025-09-11T17:42:44.313Z" }, + { url = "https://files.pythonhosted.org/packages/fa/c7/1147774bcea50d00c02600aadaa919facbd8537997a62496270133536ed6/scipy-1.16.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ff4dc42bd321991fbf611c23fc35912d690f731c9914bf3af8f417e64aca0f21", size = 33481880, upload-time = "2025-09-11T17:42:49.325Z" }, + { url = "https://files.pythonhosted.org/packages/6a/74/99d5415e4c3e46b2586f30cdbecb95e101c7192628a484a40dd0d163811a/scipy-1.16.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:654324826654d4d9133e10675325708fb954bc84dae6e9ad0a52e75c6b1a01d7", size = 35791425, upload-time = "2025-09-11T17:42:54.711Z" }, + { url = "https://files.pythonhosted.org/packages/1b/ee/a6559de7c1cc710e938c0355d9d4fbcd732dac4d0d131959d1f3b63eb29c/scipy-1.16.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63870a84cd15c44e65220eaed2dac0e8f8b26bbb991456a033c1d9abfe8a94f8", size = 36178622, upload-time = "2025-09-11T17:43:00.375Z" }, + { url = "https://files.pythonhosted.org/packages/4e/7b/f127a5795d5ba8ece4e0dce7d4a9fb7cb9e4f4757137757d7a69ab7d4f1a/scipy-1.16.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:fa01f0f6a3050fa6a9771a95d5faccc8e2f5a92b4a2e5440a0fa7264a2398472", size = 38783985, upload-time = "2025-09-11T17:43:06.661Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/9f/bc81c1d1e033951eb5912cd3750cc005943afa3e65a725d2443a3b3c4347/scipy-1.16.2-cp313-cp313t-win_amd64.whl", hash = "sha256:116296e89fba96f76353a8579820c2512f6e55835d3fad7780fece04367de351", size = 38631367, upload-time = "2025-09-11T17:43:14.44Z" }, + { url = "https://files.pythonhosted.org/packages/d6/5e/2cc7555fd81d01814271412a1d59a289d25f8b63208a0a16c21069d55d3e/scipy-1.16.2-cp313-cp313t-win_arm64.whl", hash = "sha256:98e22834650be81d42982360382b43b17f7ba95e0e6993e2a4f5b9ad9283a94d", size = 25787992, upload-time = "2025-09-11T17:43:19.745Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ac/ad8951250516db71619f0bd3b2eb2448db04b720a003dd98619b78b692c0/scipy-1.16.2-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:567e77755019bb7461513c87f02bb73fb65b11f049aaaa8ca17cfaa5a5c45d77", size = 36595109, upload-time = "2025-09-11T17:43:35.713Z" }, + { url = "https://files.pythonhosted.org/packages/ff/f6/5779049ed119c5b503b0f3dc6d6f3f68eefc3a9190d4ad4c276f854f051b/scipy-1.16.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:17d9bb346194e8967296621208fcdfd39b55498ef7d2f376884d5ac47cec1a70", size = 28859110, upload-time = "2025-09-11T17:43:40.814Z" }, + { url = "https://files.pythonhosted.org/packages/82/09/9986e410ae38bf0a0c737ff8189ac81a93b8e42349aac009891c054403d7/scipy-1.16.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:0a17541827a9b78b777d33b623a6dcfe2ef4a25806204d08ead0768f4e529a88", size = 20850110, upload-time = "2025-09-11T17:43:44.981Z" }, + { url = "https://files.pythonhosted.org/packages/0d/ad/485cdef2d9215e2a7df6d61b81d2ac073dfacf6ae24b9ae87274c4e936ae/scipy-1.16.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:d7d4c6ba016ffc0f9568d012f5f1eb77ddd99412aea121e6fa8b4c3b7cbad91f", size = 23497014, upload-time = "2025-09-11T17:43:49.074Z" }, + { url = "https://files.pythonhosted.org/packages/a7/74/f6a852e5d581122b8f0f831f1d1e32fb8987776ed3658e95c377d308ed86/scipy-1.16.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9702c4c023227785c779cba2e1d6f7635dbb5b2e0936cdd3a4ecb98d78fd41eb", size = 33401155, upload-time = "2025-09-11T17:43:54.661Z" }, + { url = "https://files.pythonhosted.org/packages/d9/f5/61d243bbc7c6e5e4e13dde9887e84a5cbe9e0f75fd09843044af1590844e/scipy-1.16.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d1cdf0ac28948d225decdefcc45ad7dd91716c29ab56ef32f8e0d50657dffcc7", size = 35691174, upload-time = "2025-09-11T17:44:00.101Z" }, + { url = "https://files.pythonhosted.org/packages/03/99/59933956331f8cc57e406cdb7a483906c74706b156998f322913e789c7e1/scipy-1.16.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:70327d6aa572a17c2941cdfb20673f82e536e91850a2e4cb0c5b858b690e1548", size = 36070752, upload-time = "2025-09-11T17:44:05.619Z" }, + { url = "https://files.pythonhosted.org/packages/c6/7d/00f825cfb47ee19ef74ecf01244b43e95eae74e7e0ff796026ea7cd98456/scipy-1.16.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5221c0b2a4b58aa7c4ed0387d360fd90ee9086d383bb34d9f2789fafddc8a936", size = 38701010, upload-time = "2025-09-11T17:44:11.322Z" }, + { url = "https://files.pythonhosted.org/packages/e4/9f/b62587029980378304ba5a8563d376c96f40b1e133daacee76efdcae32de/scipy-1.16.2-cp314-cp314-win_amd64.whl", hash = "sha256:f5a85d7b2b708025af08f060a496dd261055b617d776fc05a1a1cc69e09fe9ff", size = 39360061, upload-time = "2025-09-11T17:45:09.814Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/04/7a2f1609921352c7fbee0815811b5050582f67f19983096c4769867ca45f/scipy-1.16.2-cp314-cp314-win_arm64.whl", hash = "sha256:2cc73a33305b4b24556957d5857d6253ce1e2dcd67fa0ff46d87d1670b3e1e1d", size = 26126914, upload-time = "2025-09-11T17:45:14.73Z" }, + { url = "https://files.pythonhosted.org/packages/51/b9/60929ce350c16b221928725d2d1d7f86cf96b8bc07415547057d1196dc92/scipy-1.16.2-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:9ea2a3fed83065d77367775d689401a703d0f697420719ee10c0780bcab594d8", size = 37013193, upload-time = "2025-09-11T17:44:16.757Z" }, + { url = "https://files.pythonhosted.org/packages/2a/41/ed80e67782d4bc5fc85a966bc356c601afddd175856ba7c7bb6d9490607e/scipy-1.16.2-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:7280d926f11ca945c3ef92ba960fa924e1465f8d07ce3a9923080363390624c4", size = 29390172, upload-time = "2025-09-11T17:44:21.783Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a3/2f673ace4090452696ccded5f5f8efffb353b8f3628f823a110e0170b605/scipy-1.16.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:8afae1756f6a1fe04636407ef7dbece33d826a5d462b74f3d0eb82deabefd831", size = 21381326, upload-time = "2025-09-11T17:44:25.982Z" }, + { url = "https://files.pythonhosted.org/packages/42/bf/59df61c5d51395066c35836b78136accf506197617c8662e60ea209881e1/scipy-1.16.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:5c66511f29aa8d233388e7416a3f20d5cae7a2744d5cee2ecd38c081f4e861b3", size = 23915036, upload-time = "2025-09-11T17:44:30.527Z" }, + { url = "https://files.pythonhosted.org/packages/91/c3/edc7b300dc16847ad3672f1a6f3f7c5d13522b21b84b81c265f4f2760d4a/scipy-1.16.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:efe6305aeaa0e96b0ccca5ff647a43737d9a092064a3894e46c414db84bc54ac", size = 33484341, upload-time = "2025-09-11T17:44:35.981Z" }, + { url = "https://files.pythonhosted.org/packages/26/c7/24d1524e72f06ff141e8d04b833c20db3021020563272ccb1b83860082a9/scipy-1.16.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7f3a337d9ae06a1e8d655ee9d8ecb835ea5ddcdcbd8d23012afa055ab014f374", size = 35790840, upload-time = "2025-09-11T17:44:41.76Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b7/5aaad984eeedd56858dc33d75efa59e8ce798d918e1033ef62d2708f2c3d/scipy-1.16.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bab3605795d269067d8ce78a910220262711b753de8913d3deeaedb5dded3bb6", size = 36174716, upload-time = "2025-09-11T17:44:47.316Z" }, + { url = "https://files.pythonhosted.org/packages/fd/c2/e276a237acb09824822b0ada11b028ed4067fdc367a946730979feacb870/scipy-1.16.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b0348d8ddb55be2a844c518cd8cc8deeeb8aeba707cf834db5758fc89b476a2c", size = 38790088, upload-time = "2025-09-11T17:44:53.011Z" }, + { url = "https://files.pythonhosted.org/packages/c6/b4/5c18a766e8353015439f3780f5fc473f36f9762edc1a2e45da3ff5a31b21/scipy-1.16.2-cp314-cp314t-win_amd64.whl", hash = "sha256:26284797e38b8a75e14ea6631d29bda11e76ceaa6ddb6fdebbfe4c4d90faf2f9", size = 39457455, upload-time = "2025-09-11T17:44:58.899Z" }, + { url = "https://files.pythonhosted.org/packages/97/30/2f9a5243008f76dfc5dee9a53dfb939d9b31e16ce4bd4f2e628bfc5d89d2/scipy-1.16.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d2a4472c231328d4de38d5f1f68fdd6d28a615138f842580a8a321b5845cf779", size = 26448374, upload-time = "2025-09-11T17:45:03.45Z" }, +] + +[[package]] +name = "scramp" +version = "1.4.6" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "asn1crypto" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/77/6db18bab446c12cfbee22ca8f65d5b187966bd8f900aeb65db9e60d4be3d/scramp-1.4.6.tar.gz", hash = "sha256:fe055ebbebf4397b9cb323fcc4b299f219cd1b03fd673ca40c97db04ac7d107e", size = 16306, upload-time = "2025-07-05T14:44:03.977Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/bf/54b5d40bea1c1805175ead2d496c267f05eec87561687dd73ab76869d8d9/scramp-1.4.6-py3-none-any.whl", hash = "sha256:a0cf9d2b4624b69bac5432dd69fecfc55a542384fe73c3a23ed9b138cda484e1", size = 12812, upload-time = "2025-07-05T14:44:02.345Z" }, +] + +[[package]] +name = "sentry-sdk" +version = "2.47.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4a/2a/d225cbf87b6c8ecce5664db7bcecb82c317e448e3b24a2dcdaacb18ca9a7/sentry_sdk-2.47.0.tar.gz", hash = "sha256:8218891d5e41b4ea8d61d2aed62ed10c80e39d9f2959d6f939efbf056857e050", size = 381895, upload-time = "2025-12-03T14:06:36.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/ac/d6286ea0d49e7b58847faf67b00e56bb4ba3d525281e2ac306e1f1f353da/sentry_sdk-2.47.0-py2.py3-none-any.whl", hash = "sha256:d72f8c61025b7d1d9e52510d03a6247b280094a327dd900d987717a4fce93412", size = 411088, upload-time = "2025-12-03T14:06:35.374Z" }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + +[[package]] +name = "shapely" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489, upload-time = "2025-09-24T13:51:41.432Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/8d/1ff672dea9ec6a7b5d422eb6d095ed886e2e523733329f75fdcb14ee1149/shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618", size = 1820038, upload-time = "2025-09-24T13:50:15.628Z" }, + { url = "https://files.pythonhosted.org/packages/4f/ce/28fab8c772ce5db23a0d86bf0adaee0c4c79d5ad1db766055fa3dab442e2/shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d", size = 1626039, upload-time = "2025-09-24T13:50:16.881Z" }, + { url = "https://files.pythonhosted.org/packages/70/8b/868b7e3f4982f5006e9395c1e12343c66a8155c0374fdc07c0e6a1ab547d/shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09", size = 3001519, upload-time = "2025-09-24T13:50:18.606Z" }, + { url = 
"https://files.pythonhosted.org/packages/13/02/58b0b8d9c17c93ab6340edd8b7308c0c5a5b81f94ce65705819b7416dba5/shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26", size = 3110842, upload-time = "2025-09-24T13:50:21.77Z" }, + { url = "https://files.pythonhosted.org/packages/af/61/8e389c97994d5f331dcffb25e2fa761aeedfb52b3ad9bcdd7b8671f4810a/shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7", size = 4021316, upload-time = "2025-09-24T13:50:23.626Z" }, + { url = "https://files.pythonhosted.org/packages/d3/d4/9b2a9fe6039f9e42ccf2cb3e84f219fd8364b0c3b8e7bbc857b5fbe9c14c/shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2", size = 4178586, upload-time = "2025-09-24T13:50:25.443Z" }, + { url = "https://files.pythonhosted.org/packages/16/f6/9840f6963ed4decf76b08fd6d7fed14f8779fb7a62cb45c5617fa8ac6eab/shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6", size = 1543961, upload-time = "2025-09-24T13:50:26.968Z" }, + { url = "https://files.pythonhosted.org/packages/38/1e/3f8ea46353c2a33c1669eb7327f9665103aa3a8dfe7f2e4ef714c210b2c2/shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc", size = 1722856, upload-time = "2025-09-24T13:50:28.497Z" }, + { url = "https://files.pythonhosted.org/packages/24/c0/f3b6453cf2dfa99adc0ba6675f9aaff9e526d2224cbd7ff9c1a879238693/shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94", size = 1833550, upload-time = "2025-09-24T13:50:30.019Z" }, + { url = "https://files.pythonhosted.org/packages/86/07/59dee0bc4b913b7ab59ab1086225baca5b8f19865e6101db9ebb7243e132/shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359", size = 1643556, upload-time = "2025-09-24T13:50:32.291Z" }, + { url = "https://files.pythonhosted.org/packages/26/29/a5397e75b435b9895cd53e165083faed5d12fd9626eadec15a83a2411f0f/shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3", size = 2988308, upload-time = "2025-09-24T13:50:33.862Z" }, + { url = "https://files.pythonhosted.org/packages/b9/37/e781683abac55dde9771e086b790e554811a71ed0b2b8a1e789b7430dd44/shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b", size = 3099844, upload-time = "2025-09-24T13:50:35.459Z" }, + { url = "https://files.pythonhosted.org/packages/d8/f3/9876b64d4a5a321b9dc482c92bb6f061f2fa42131cba643c699f39317cb9/shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc", size = 3988842, upload-time = "2025-09-24T13:50:37.478Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/704c7292f7014c7e74ec84eddb7b109e1fbae74a16deae9c1504b1d15565/shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d", size = 4152714, upload-time = "2025-09-24T13:50:39.9Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/46/319c9dc788884ad0785242543cdffac0e6530e4d0deb6c4862bc4143dcf3/shapely-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454", size = 1542745, upload-time = "2025-09-24T13:50:41.414Z" }, + { url = "https://files.pythonhosted.org/packages/ec/bf/cb6c1c505cb31e818e900b9312d514f381fbfa5c4363edfce0fcc4f8c1a4/shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179", size = 1722861, upload-time = "2025-09-24T13:50:43.35Z" }, + { url = "https://files.pythonhosted.org/packages/c3/90/98ef257c23c46425dc4d1d31005ad7c8d649fe423a38b917db02c30f1f5a/shapely-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b510dda1a3672d6879beb319bc7c5fd302c6c354584690973c838f46ec3e0fa8", size = 1832644, upload-time = "2025-09-24T13:50:44.886Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ab/0bee5a830d209adcd3a01f2d4b70e587cdd9fd7380d5198c064091005af8/shapely-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8cff473e81017594d20ec55d86b54bc635544897e13a7cfc12e36909c5309a2a", size = 1642887, upload-time = "2025-09-24T13:50:46.735Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5e/7d7f54ba960c13302584c73704d8c4d15404a51024631adb60b126a4ae88/shapely-2.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe7b77dc63d707c09726b7908f575fc04ff1d1ad0f3fb92aec212396bc6cfe5e", size = 2970931, upload-time = "2025-09-24T13:50:48.374Z" }, + { url = "https://files.pythonhosted.org/packages/f2/a2/83fc37e2a58090e3d2ff79175a95493c664bcd0b653dd75cb9134645a4e5/shapely-2.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7ed1a5bbfb386ee8332713bf7508bc24e32d24b74fc9a7b9f8529a55db9f4ee6", size = 3082855, upload-time = "2025-09-24T13:50:50.037Z" }, + { url = "https://files.pythonhosted.org/packages/44/2b/578faf235a5b09f16b5f02833c53822294d7f21b242f8e2d0cf03fb64321/shapely-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a84e0582858d841d54355246ddfcbd1fce3179f185da7470f41ce39d001ee1af", size = 3979960, upload-time = "2025-09-24T13:50:51.74Z" }, + { url = "https://files.pythonhosted.org/packages/4d/04/167f096386120f692cc4ca02f75a17b961858997a95e67a3cb6a7bbd6b53/shapely-2.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc3487447a43d42adcdf52d7ac73804f2312cbfa5d433a7d2c506dcab0033dfd", size = 4142851, upload-time = "2025-09-24T13:50:53.49Z" }, + { url = "https://files.pythonhosted.org/packages/48/74/fb402c5a6235d1c65a97348b48cdedb75fb19eca2b1d66d04969fc1c6091/shapely-2.1.2-cp313-cp313-win32.whl", hash = "sha256:9c3a3c648aedc9f99c09263b39f2d8252f199cb3ac154fadc173283d7d111350", size = 1541890, upload-time = "2025-09-24T13:50:55.337Z" }, + { url = "https://files.pythonhosted.org/packages/41/47/3647fe7ad990af60ad98b889657a976042c9988c2807cf322a9d6685f462/shapely-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:ca2591bff6645c216695bdf1614fca9c82ea1144d4a7591a466fef64f28f0715", size = 1722151, upload-time = "2025-09-24T13:50:57.153Z" }, + { url = "https://files.pythonhosted.org/packages/3c/49/63953754faa51ffe7d8189bfbe9ca34def29f8c0e34c67cbe2a2795f269d/shapely-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2d93d23bdd2ed9dc157b46bc2f19b7da143ca8714464249bef6771c679d5ff40", size = 1834130, upload-time = "2025-09-24T13:50:58.49Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/ee/dce001c1984052970ff60eb4727164892fb2d08052c575042a47f5a9e88f/shapely-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:01d0d304b25634d60bd7cf291828119ab55a3bab87dc4af1e44b07fb225f188b", size = 1642802, upload-time = "2025-09-24T13:50:59.871Z" }, + { url = "https://files.pythonhosted.org/packages/da/e7/fc4e9a19929522877fa602f705706b96e78376afb7fad09cad5b9af1553c/shapely-2.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8d8382dd120d64b03698b7298b89611a6ea6f55ada9d39942838b79c9bc89801", size = 3018460, upload-time = "2025-09-24T13:51:02.08Z" }, + { url = "https://files.pythonhosted.org/packages/a1/18/7519a25db21847b525696883ddc8e6a0ecaa36159ea88e0fef11466384d0/shapely-2.1.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:19efa3611eef966e776183e338b2d7ea43569ae99ab34f8d17c2c054d3205cc0", size = 3095223, upload-time = "2025-09-24T13:51:04.472Z" }, + { url = "https://files.pythonhosted.org/packages/48/de/b59a620b1f3a129c3fecc2737104a0a7e04e79335bd3b0a1f1609744cf17/shapely-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:346ec0c1a0fcd32f57f00e4134d1200e14bf3f5ae12af87ba83ca275c502498c", size = 4030760, upload-time = "2025-09-24T13:51:06.455Z" }, + { url = "https://files.pythonhosted.org/packages/96/b3/c6655ee7232b417562bae192ae0d3ceaadb1cc0ffc2088a2ddf415456cc2/shapely-2.1.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6305993a35989391bd3476ee538a5c9a845861462327efe00dd11a5c8c709a99", size = 4170078, upload-time = "2025-09-24T13:51:08.584Z" }, + { url = "https://files.pythonhosted.org/packages/a0/8e/605c76808d73503c9333af8f6cbe7e1354d2d238bda5f88eea36bfe0f42a/shapely-2.1.2-cp313-cp313t-win32.whl", hash = "sha256:c8876673449f3401f278c86eb33224c5764582f72b653a415d0e6672fde887bf", size = 1559178, upload-time = "2025-09-24T13:51:10.73Z" }, + { url = "https://files.pythonhosted.org/packages/36/f7/d317eb232352a1f1444d11002d477e54514a4a6045536d49d0c59783c0da/shapely-2.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:4a44bc62a10d84c11a7a3d7c1c4fe857f7477c3506e24c9062da0db0ae0c449c", size = 1739756, upload-time = "2025-09-24T13:51:12.105Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c4/3ce4c2d9b6aabd27d26ec988f08cb877ba9e6e96086eff81bfea93e688c7/shapely-2.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:9a522f460d28e2bf4e12396240a5fc1518788b2fcd73535166d748399ef0c223", size = 1831290, upload-time = "2025-09-24T13:51:13.56Z" }, + { url = "https://files.pythonhosted.org/packages/17/b9/f6ab8918fc15429f79cb04afa9f9913546212d7fb5e5196132a2af46676b/shapely-2.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1ff629e00818033b8d71139565527ced7d776c269a49bd78c9df84e8f852190c", size = 1641463, upload-time = "2025-09-24T13:51:14.972Z" }, + { url = "https://files.pythonhosted.org/packages/a5/57/91d59ae525ca641e7ac5551c04c9503aee6f29b92b392f31790fcb1a4358/shapely-2.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f67b34271dedc3c653eba4e3d7111aa421d5be9b4c4c7d38d30907f796cb30df", size = 2970145, upload-time = "2025-09-24T13:51:16.961Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cb/4948be52ee1da6927831ab59e10d4c29baa2a714f599f1f0d1bc747f5777/shapely-2.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:21952dc00df38a2c28375659b07a3979d22641aeb104751e769c3ee825aadecf", size = 3073806, upload-time = "2025-09-24T13:51:18.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/83/f768a54af775eb41ef2e7bec8a0a0dbe7d2431c3e78c0a8bdba7ab17e446/shapely-2.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1f2f33f486777456586948e333a56ae21f35ae273be99255a191f5c1fa302eb4", size = 3980803, upload-time = "2025-09-24T13:51:20.37Z" }, + { url = "https://files.pythonhosted.org/packages/9f/cb/559c7c195807c91c79d38a1f6901384a2878a76fbdf3f1048893a9b7534d/shapely-2.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cf831a13e0d5a7eb519e96f58ec26e049b1fad411fc6fc23b162a7ce04d9cffc", size = 4133301, upload-time = "2025-09-24T13:51:21.887Z" }, + { url = "https://files.pythonhosted.org/packages/80/cd/60d5ae203241c53ef3abd2ef27c6800e21afd6c94e39db5315ea0cbafb4a/shapely-2.1.2-cp314-cp314-win32.whl", hash = "sha256:61edcd8d0d17dd99075d320a1dd39c0cb9616f7572f10ef91b4b5b00c4aeb566", size = 1583247, upload-time = "2025-09-24T13:51:23.401Z" }, + { url = "https://files.pythonhosted.org/packages/74/d4/135684f342e909330e50d31d441ace06bf83c7dc0777e11043f99167b123/shapely-2.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:a444e7afccdb0999e203b976adb37ea633725333e5b119ad40b1ca291ecf311c", size = 1773019, upload-time = "2025-09-24T13:51:24.873Z" }, + { url = "https://files.pythonhosted.org/packages/a3/05/a44f3f9f695fa3ada22786dc9da33c933da1cbc4bfe876fe3a100bafe263/shapely-2.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5ebe3f84c6112ad3d4632b1fd2290665aa75d4cef5f6c5d77c4c95b324527c6a", size = 1834137, upload-time = "2025-09-24T13:51:26.665Z" }, + { url = "https://files.pythonhosted.org/packages/52/7e/4d57db45bf314573427b0a70dfca15d912d108e6023f623947fa69f39b72/shapely-2.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5860eb9f00a1d49ebb14e881f5caf6c2cf472c7fd38bd7f253bbd34f934eb076", size = 1642884, upload-time = "2025-09-24T13:51:28.029Z" }, + { url = "https://files.pythonhosted.org/packages/5a/27/4e29c0a55d6d14ad7422bf86995d7ff3f54af0eba59617eb95caf84b9680/shapely-2.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b705c99c76695702656327b819c9660768ec33f5ce01fa32b2af62b56ba400a1", size = 3018320, upload-time = "2025-09-24T13:51:29.903Z" }, + { url = "https://files.pythonhosted.org/packages/9f/bb/992e6a3c463f4d29d4cd6ab8963b75b1b1040199edbd72beada4af46bde5/shapely-2.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a1fd0ea855b2cf7c9cddaf25543e914dd75af9de08785f20ca3085f2c9ca60b0", size = 3094931, upload-time = "2025-09-24T13:51:32.699Z" }, + { url = "https://files.pythonhosted.org/packages/9c/16/82e65e21070e473f0ed6451224ed9fa0be85033d17e0c6e7213a12f59d12/shapely-2.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:df90e2db118c3671a0754f38e36802db75fe0920d211a27481daf50a711fdf26", size = 4030406, upload-time = "2025-09-24T13:51:34.189Z" }, + { url = "https://files.pythonhosted.org/packages/7c/75/c24ed871c576d7e2b64b04b1fe3d075157f6eb54e59670d3f5ffb36e25c7/shapely-2.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:361b6d45030b4ac64ddd0a26046906c8202eb60d0f9f53085f5179f1d23021a0", size = 4169511, upload-time = "2025-09-24T13:51:36.297Z" }, + { url = "https://files.pythonhosted.org/packages/b1/f7/b3d1d6d18ebf55236eec1c681ce5e665742aab3c0b7b232720a7d43df7b6/shapely-2.1.2-cp314-cp314t-win32.whl", hash = "sha256:b54df60f1fbdecc8ebc2c5b11870461a6417b3d617f555e5033f1505d36e5735", size = 1602607, upload-time = "2025-09-24T13:51:37.757Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/f6/f09272a71976dfc138129b8faf435d064a811ae2f708cb147dccdf7aacdb/shapely-2.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:0036ac886e0923417932c2e6369b6c52e38e0ff5d9120b90eef5cd9a5fc5cae9", size = 1796682, upload-time = "2025-09-24T13:51:39.233Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + +[[package]] +name = "six" +version = "1.12.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dd/bf/4138e7bfb757de47d1f4b6994648ec67a51efe58fa907c1e11e350cddfca/six-1.12.0.tar.gz", hash = "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73", size = 32725, upload-time = "2018-12-10T00:59:58.966Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/fb/00a976f728d0d1fecfe898238ce23f502a721c0ac0ecfedb80e0d88c64e9/six-1.12.0-py2.py3-none-any.whl", hash = "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", size = 10586, upload-time = "2018-12-10T00:59:57.273Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "sortedcontainers" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, +] + +[[package]] +name = "soupsieve" +version = "2.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472, upload-time = "2025-08-27T15:39:51.78Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" }, +] + +[[package]] +name = "soxr" +version = "0.5.0.post1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/c0/4429bf9b3be10e749149e286aa5c53775399ec62891c6b970456c6dca325/soxr-0.5.0.post1.tar.gz", hash = "sha256:7092b9f3e8a416044e1fa138c8172520757179763b85dc53aa9504f4813cff73", size = 170853, upload-time = "2024-08-31T03:43:33.058Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/28/dc62dae260a77603e8257e9b79078baa2ca4c0b4edc6f9f82c9113d6ef18/soxr-0.5.0.post1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:6fb77b626773a966e3d8f6cb24f6f74b5327fa5dc90f1ff492450e9cdc03a378", size = 203648, upload-time = "2024-08-31T03:43:08.339Z" }, + { url = "https://files.pythonhosted.org/packages/0e/48/3e88329a695f6e0e38a3b171fff819d75d7cc055dae1ec5d5074f34d61e3/soxr-0.5.0.post1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:39e0f791ba178d69cd676485dbee37e75a34f20daa478d90341ecb7f6d9d690f", size = 159933, upload-time = "2024-08-31T03:43:10.053Z" }, + { url = "https://files.pythonhosted.org/packages/9c/a5/6b439164be6871520f3d199554568a7656e96a867adbbe5bac179caf5776/soxr-0.5.0.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f0b558f445ba4b64dbcb37b5f803052eee7d93b1dbbbb97b3ec1787cb5a28eb", size = 221010, upload-time = "2024-08-31T03:43:11.839Z" }, + { url = "https://files.pythonhosted.org/packages/9f/e5/400e3bf7f29971abad85cb877e290060e5ec61fccd2fa319e3d85709c1be/soxr-0.5.0.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca6903671808e0a6078b0d146bb7a2952b118dfba44008b2aa60f221938ba829", size = 252471, upload-time = "2024-08-31T03:43:13.347Z" }, + { url = "https://files.pythonhosted.org/packages/86/94/6a7e91bea7e6ca193ee429869b8f18548cd79759e064021ecb5756024c7c/soxr-0.5.0.post1-cp311-cp311-win_amd64.whl", hash = "sha256:c4d8d5283ed6f5efead0df2c05ae82c169cfdfcf5a82999c2d629c78b33775e8", size = 166723, upload-time = "2024-08-31T03:43:15.212Z" }, + { url = "https://files.pythonhosted.org/packages/5d/e3/d422d279e51e6932e7b64f1170a4f61a7ee768e0f84c9233a5b62cd2c832/soxr-0.5.0.post1-cp312-abi3-macosx_10_14_x86_64.whl", hash = "sha256:fef509466c9c25f65eae0ce1e4b9ac9705d22c6038c914160ddaf459589c6e31", size = 199993, upload-time = "2024-08-31T03:43:17.24Z" }, + { url = "https://files.pythonhosted.org/packages/20/f1/88adaca3c52e03bcb66b63d295df2e2d35bf355d19598c6ce84b20be7fca/soxr-0.5.0.post1-cp312-abi3-macosx_11_0_arm64.whl", hash = "sha256:4704ba6b13a3f1e41d12acf192878384c1c31f71ce606829c64abdf64a8d7d32", size = 156373, upload-time = "2024-08-31T03:43:18.633Z" }, + { url = "https://files.pythonhosted.org/packages/b8/38/bad15a9e615215c8219652ca554b601663ac3b7ac82a284aca53ec2ff48c/soxr-0.5.0.post1-cp312-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd052a66471a7335b22a6208601a9d0df7b46b8d087dce4ff6e13eed6a33a2a1", size = 216564, upload-time = "2024-08-31T03:43:20.789Z" }, + { url = "https://files.pythonhosted.org/packages/e1/1a/569ea0420a0c4801c2c8dd40d8d544989522f6014d51def689125f3f2935/soxr-0.5.0.post1-cp312-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a3f16810dd649ab1f433991d2a9661e9e6a116c2b4101039b53b3c3e90a094fc", size = 248455, upload-time = "2024-08-31T03:43:22.165Z" }, + { url = "https://files.pythonhosted.org/packages/bc/10/440f1ba3d4955e0dc740bbe4ce8968c254a3d644d013eb75eea729becdb8/soxr-0.5.0.post1-cp312-abi3-win_amd64.whl", hash = "sha256:b1be9fee90afb38546bdbd7bde714d1d9a8c5a45137f97478a83b65e7f3146f6", size = 164937, upload-time = "2024-08-31T03:43:23.671Z" }, +] + +[[package]] +name = "speechrecognition" +version = "3.8.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/e1/7f5678cd94ec1234269d23756dbdaa4c8cfaed973412f88ae8adf7893a50/SpeechRecognition-3.8.1-py2.py3-none-any.whl", hash = "sha256:4d8f73a0c05ec70331c3bacaa89ecc06dfa8d9aba0899276664cda06ab597e8e", size = 32833456, upload-time = "2017-12-05T13:58:29.977Z" }, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.43" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d7/bc/d59b5d97d27229b0e009bd9098cd81af71c2fa5549c580a0a67b9bed0496/sqlalchemy-2.0.43.tar.gz", hash = "sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417", size = 9762949, upload-time = "2025-08-11T14:24:58.438Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/77/fa7189fe44114658002566c6fe443d3ed0ec1fa782feb72af6ef7fbe98e7/sqlalchemy-2.0.43-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52d9b73b8fb3e9da34c2b31e6d99d60f5f99fd8c1225c9dad24aeb74a91e1d29", size = 2136472, upload-time = "2025-08-11T15:52:21.789Z" }, + { url = "https://files.pythonhosted.org/packages/99/ea/92ac27f2fbc2e6c1766bb807084ca455265707e041ba027c09c17d697867/sqlalchemy-2.0.43-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f42f23e152e4545157fa367b2435a1ace7571cab016ca26038867eb7df2c3631", size = 2126535, upload-time = "2025-08-11T15:52:23.109Z" }, + { url = "https://files.pythonhosted.org/packages/94/12/536ede80163e295dc57fff69724caf68f91bb40578b6ac6583a293534849/sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fb1a8c5438e0c5ea51afe9c6564f951525795cf432bed0c028c1cb081276685", size = 3297521, upload-time = "2025-08-11T15:50:33.536Z" }, + { url = "https://files.pythonhosted.org/packages/03/b5/cacf432e6f1fc9d156eca0560ac61d4355d2181e751ba8c0cd9cb232c8c1/sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db691fa174e8f7036afefe3061bc40ac2b770718be2862bfb03aabae09051aca", size = 3297343, upload-time = "2025-08-11T15:57:51.186Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ba/d4c9b526f18457667de4c024ffbc3a0920c34237b9e9dd298e44c7c00ee5/sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2b3b4927d0bc03d02ad883f402d5de201dbc8894ac87d2e981e7d87430e60d", size = 3232113, upload-time = "2025-08-11T15:50:34.949Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/79/c0121b12b1b114e2c8a10ea297a8a6d5367bc59081b2be896815154b1163/sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d3d9b904ad4a6b175a2de0738248822f5ac410f52c2fd389ada0b5262d6a1e3", size = 3258240, upload-time = "2025-08-11T15:57:52.983Z" }, + { url = "https://files.pythonhosted.org/packages/79/99/a2f9be96fb382f3ba027ad42f00dbe30fdb6ba28cda5f11412eee346bec5/sqlalchemy-2.0.43-cp311-cp311-win32.whl", hash = "sha256:5cda6b51faff2639296e276591808c1726c4a77929cfaa0f514f30a5f6156921", size = 2101248, upload-time = "2025-08-11T15:55:01.855Z" }, + { url = "https://files.pythonhosted.org/packages/ee/13/744a32ebe3b4a7a9c7ea4e57babae7aa22070d47acf330d8e5a1359607f1/sqlalchemy-2.0.43-cp311-cp311-win_amd64.whl", hash = "sha256:c5d1730b25d9a07727d20ad74bc1039bbbb0a6ca24e6769861c1aa5bf2c4c4a8", size = 2126109, upload-time = "2025-08-11T15:55:04.092Z" }, + { url = "https://files.pythonhosted.org/packages/61/db/20c78f1081446095450bdc6ee6cc10045fce67a8e003a5876b6eaafc5cc4/sqlalchemy-2.0.43-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:20d81fc2736509d7a2bd33292e489b056cbae543661bb7de7ce9f1c0cd6e7f24", size = 2134891, upload-time = "2025-08-11T15:51:13.019Z" }, + { url = "https://files.pythonhosted.org/packages/45/0a/3d89034ae62b200b4396f0f95319f7d86e9945ee64d2343dcad857150fa2/sqlalchemy-2.0.43-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b9fc27650ff5a2c9d490c13c14906b918b0de1f8fcbb4c992712d8caf40e83", size = 2123061, upload-time = "2025-08-11T15:51:14.319Z" }, + { url = "https://files.pythonhosted.org/packages/cb/10/2711f7ff1805919221ad5bee205971254845c069ee2e7036847103ca1e4c/sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6772e3ca8a43a65a37c88e2f3e2adfd511b0b1da37ef11ed78dea16aeae85bd9", size = 3320384, upload-time = "2025-08-11T15:52:35.088Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0e/3d155e264d2ed2778484006ef04647bc63f55b3e2d12e6a4f787747b5900/sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a113da919c25f7f641ffbd07fbc9077abd4b3b75097c888ab818f962707eb48", size = 3329648, upload-time = "2025-08-11T15:56:34.153Z" }, + { url = "https://files.pythonhosted.org/packages/5b/81/635100fb19725c931622c673900da5efb1595c96ff5b441e07e3dd61f2be/sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4286a1139f14b7d70141c67a8ae1582fc2b69105f1b09d9573494eb4bb4b2687", size = 3258030, upload-time = "2025-08-11T15:52:36.933Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ed/a99302716d62b4965fded12520c1cbb189f99b17a6d8cf77611d21442e47/sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:529064085be2f4d8a6e5fab12d36ad44f1909a18848fcfbdb59cc6d4bbe48efe", size = 3294469, upload-time = "2025-08-11T15:56:35.553Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a2/3a11b06715149bf3310b55a98b5c1e84a42cfb949a7b800bc75cb4e33abc/sqlalchemy-2.0.43-cp312-cp312-win32.whl", hash = "sha256:b535d35dea8bbb8195e7e2b40059e2253acb2b7579b73c1b432a35363694641d", size = 2098906, upload-time = "2025-08-11T15:55:00.645Z" }, + { url = "https://files.pythonhosted.org/packages/bc/09/405c915a974814b90aa591280623adc6ad6b322f61fd5cff80aeaef216c9/sqlalchemy-2.0.43-cp312-cp312-win_amd64.whl", hash = "sha256:1c6d85327ca688dbae7e2b06d7d84cfe4f3fffa5b5f9e21bb6ce9d0e1a0e0e0a", size = 2126260, upload-time = "2025-08-11T15:55:02.965Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/1c/a7260bd47a6fae7e03768bf66451437b36451143f36b285522b865987ced/sqlalchemy-2.0.43-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e7c08f57f75a2bb62d7ee80a89686a5e5669f199235c6d1dac75cd59374091c3", size = 2130598, upload-time = "2025-08-11T15:51:15.903Z" }, + { url = "https://files.pythonhosted.org/packages/8e/84/8a337454e82388283830b3586ad7847aa9c76fdd4f1df09cdd1f94591873/sqlalchemy-2.0.43-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:14111d22c29efad445cd5021a70a8b42f7d9152d8ba7f73304c4d82460946aaa", size = 2118415, upload-time = "2025-08-11T15:51:17.256Z" }, + { url = "https://files.pythonhosted.org/packages/cf/ff/22ab2328148492c4d71899d62a0e65370ea66c877aea017a244a35733685/sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b27b56eb2f82653168cefe6cb8e970cdaf4f3a6cb2c5e3c3c1cf3158968ff9", size = 3248707, upload-time = "2025-08-11T15:52:38.444Z" }, + { url = "https://files.pythonhosted.org/packages/dc/29/11ae2c2b981de60187f7cbc84277d9d21f101093d1b2e945c63774477aba/sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c5a9da957c56e43d72126a3f5845603da00e0293720b03bde0aacffcf2dc04f", size = 3253602, upload-time = "2025-08-11T15:56:37.348Z" }, + { url = "https://files.pythonhosted.org/packages/b8/61/987b6c23b12c56d2be451bc70900f67dd7d989d52b1ee64f239cf19aec69/sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d79f9fdc9584ec83d1b3c75e9f4595c49017f5594fee1a2217117647225d738", size = 3183248, upload-time = "2025-08-11T15:52:39.865Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/29d216002d4593c2ce1c0ec2cec46dda77bfbcd221e24caa6e85eff53d89/sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9df7126fd9db49e3a5a3999442cc67e9ee8971f3cb9644250107d7296cb2a164", size = 3219363, upload-time = "2025-08-11T15:56:39.11Z" }, + { url = "https://files.pythonhosted.org/packages/b6/e4/bd78b01919c524f190b4905d47e7630bf4130b9f48fd971ae1c6225b6f6a/sqlalchemy-2.0.43-cp313-cp313-win32.whl", hash = "sha256:7f1ac7828857fcedb0361b48b9ac4821469f7694089d15550bbcf9ab22564a1d", size = 2096718, upload-time = "2025-08-11T15:55:05.349Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a5/ca2f07a2a201f9497de1928f787926613db6307992fe5cda97624eb07c2f/sqlalchemy-2.0.43-cp313-cp313-win_amd64.whl", hash = "sha256:971ba928fcde01869361f504fcff3b7143b47d30de188b11c6357c0505824197", size = 2123200, upload-time = "2025-08-11T15:55:07.932Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d9/13bdde6521f322861fab67473cec4b1cc8999f3871953531cf61945fad92/sqlalchemy-2.0.43-py3-none-any.whl", hash = "sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc", size = 1924759, upload-time = "2025-08-11T15:39:53.024Z" }, +] + +[[package]] +name = "starlette" +version = "0.41.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1a/4c/9b5764bd22eec91c4039ef4c55334e9187085da2d8a2df7bd570869aae18/starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835", size = 2574159, upload-time = "2024-11-18T19:45:04.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/00/2b325970b3060c7cecebab6d295afe763365822b1306a12eeab198f74323/starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7", size = 73225, 
upload-time = "2024-11-18T19:45:02.027Z" }, +] + +[[package]] +name = "std-uritemplate" +version = "2.0.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/74/cc/f3d2e47d2fe828da95321ab0f4ac54e4a02294c86832469de33a048f6061/std_uritemplate-2.0.5.tar.gz", hash = "sha256:7703a886cce59d155c21b5acf1ad8d48db9f3322de98fa783a8396fbf35cbc06", size = 6015, upload-time = "2025-05-14T13:10:36.139Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/21/479d27b4597c6bf278e794ccceae40f721bc1cb0ff66a30ecb9bfb61ac9a/std_uritemplate-2.0.5-py3-none-any.whl", hash = "sha256:0f5184f8e6f315a01f92cfbed335f62f087e453e79cd586b67a724211e686c28", size = 6509, upload-time = "2025-05-14T13:10:34.983Z" }, +] + +[[package]] +name = "sympy" +version = "1.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mpmath" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921, upload-time = "2025-04-27T18:05:01.611Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, +] + +[[package]] +name = "tenacity" +version = "8.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/4d/6a19536c50b849338fcbe9290d562b52cbdcf30d8963d3588a68a4107df1/tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78", size = 47309, upload-time = "2024-07-05T07:25:31.836Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687", size = 28165, upload-time = "2024-07-05T07:25:29.591Z" }, +] + +[[package]] +name = "testing-common-database" +version = "2.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/25/3c/5f7eef6ce8a16314a39f2b905ebd5cd2bfdcbaabafb7fd71dc10c3f32c4d/testing.common.database-2.0.3.tar.gz", hash = "sha256:965d80b2985315325dc358c3061b174a712f4d4d5bf6a80b58b11f9a1dd86d73", size = 11535, upload-time = "2017-10-23T15:02:37.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/1a/ca1c39544ed92fa8ea121ff3bf05bb4838520c498942054235ebc4a83b36/testing.common.database-2.0.3-py2.py3-none-any.whl", hash = "sha256:e3ed492bf480a87f271f74c53b262caf5d85c8bc09989a8f534fa2283ec52492", size = 10500, upload-time = "2017-10-23T15:02:40.781Z" }, +] + +[[package]] +name = "testing-postgresql" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pg8000" }, + { name = "testing-common-database" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/5b/3bf1323697c4f4f0e8fb5c14d082dc2f005385ea139b19646c0fc9f1dbb7/testing.postgresql-1.3.0.tar.gz", hash = "sha256:8e1a69760369a7a8ffe63a66b6d95a5cd82db2fb976e4a8f85ffd24fbfc447d8", size = 11000, upload-time = "2016-02-04T13:57:05.396Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/11/76/d614d4bc950d961a73c952e9a2e0956d02d0869a86d3dfad070376863988/testing.postgresql-1.3.0-py2.py3-none-any.whl", hash = "sha256:1b41daeb98dfc8cd4a584bb91e8f5f4ab182993870f95257afe5f1ba6151a598", size = 8901, upload-time = "2016-02-04T13:57:11.857Z" }, +] + +[[package]] +name = "textract" +version = "1.6.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "argcomplete" }, + { name = "beautifulsoup4" }, + { name = "chardet" }, + { name = "docx2txt" }, + { name = "extract-msg" }, + { name = "pdfminer-six" }, + { name = "python-pptx" }, + { name = "six" }, + { name = "speechrecognition" }, + { name = "xlrd" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/81/9f/dd29fcec368f007d44e51f0273489d5172a6d32ed9c796df5054fbb31c9f/textract-1.6.5.tar.gz", hash = "sha256:68f0f09056885821e6c43d8538987518daa94057c306679f2857cc5ee66ad850", size = 17871, upload-time = "2022-03-10T10:49:31.93Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6b/3e/ac16b6bf28edf78296aea7d0cb416b49ed30282ac8c711662541015ee6f3/textract-1.6.5-py3-none-any.whl", hash = "sha256:0accd78ec42864e3e3827f9ef798ced9aac4727b664303b724a198fed73fa438", size = 23140, upload-time = "2022-03-10T10:49:30.384Z" }, +] + +[[package]] +name = "threadpoolctl" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/4d/08c89e34946fce2aec4fbb45c9016efd5f4d7f24af8e5d93296e935631d8/threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e", size = 21274, upload-time = "2025-03-13T13:49:23.031Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/d5/f9a850d79b0851d1d4ef6456097579a9005b31fea68726a4ae5f2d82ddd9/threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb", size = 18638, upload-time = "2025-03-13T13:49:21.846Z" }, +] + +[[package]] +name = "tiktoken" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991, upload-time = "2025-02-14T06:03:01.003Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/ae/4613a59a2a48e761c5161237fc850eb470b4bb93696db89da51b79a871f1/tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e", size = 1065987, upload-time = "2025-02-14T06:02:14.174Z" }, + { url = "https://files.pythonhosted.org/packages/3f/86/55d9d1f5b5a7e1164d0f1538a85529b5fcba2b105f92db3622e5d7de6522/tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348", size = 1009155, upload-time = "2025-02-14T06:02:15.384Z" }, + { url = "https://files.pythonhosted.org/packages/03/58/01fb6240df083b7c1916d1dcb024e2b761213c95d576e9f780dfb5625a76/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33", size = 1142898, upload-time = "2025-02-14T06:02:16.666Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/73/41591c525680cd460a6becf56c9b17468d3711b1df242c53d2c7b2183d16/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136", size = 1197535, upload-time = "2025-02-14T06:02:18.595Z" }, + { url = "https://files.pythonhosted.org/packages/7d/7c/1069f25521c8f01a1a182f362e5c8e0337907fae91b368b7da9c3e39b810/tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336", size = 1259548, upload-time = "2025-02-14T06:02:20.729Z" }, + { url = "https://files.pythonhosted.org/packages/6f/07/c67ad1724b8e14e2b4c8cca04b15da158733ac60136879131db05dda7c30/tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb", size = 893895, upload-time = "2025-02-14T06:02:22.67Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e5/21ff33ecfa2101c1bb0f9b6df750553bd873b7fb532ce2cb276ff40b197f/tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03", size = 1065073, upload-time = "2025-02-14T06:02:24.768Z" }, + { url = "https://files.pythonhosted.org/packages/8e/03/a95e7b4863ee9ceec1c55983e4cc9558bcfd8f4f80e19c4f8a99642f697d/tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210", size = 1008075, upload-time = "2025-02-14T06:02:26.92Z" }, + { url = "https://files.pythonhosted.org/packages/40/10/1305bb02a561595088235a513ec73e50b32e74364fef4de519da69bc8010/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794", size = 1140754, upload-time = "2025-02-14T06:02:28.124Z" }, + { url = "https://files.pythonhosted.org/packages/1b/40/da42522018ca496432ffd02793c3a72a739ac04c3794a4914570c9bb2925/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22", size = 1196678, upload-time = "2025-02-14T06:02:29.845Z" }, + { url = "https://files.pythonhosted.org/packages/5c/41/1e59dddaae270ba20187ceb8aa52c75b24ffc09f547233991d5fd822838b/tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2", size = 1259283, upload-time = "2025-02-14T06:02:33.838Z" }, + { url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897, upload-time = "2025-02-14T06:02:36.265Z" }, + { url = "https://files.pythonhosted.org/packages/7a/11/09d936d37f49f4f494ffe660af44acd2d99eb2429d60a57c71318af214e0/tiktoken-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b0e8e05a26eda1249e824156d537015480af7ae222ccb798e5234ae0285dbdb", size = 1064919, upload-time = "2025-02-14T06:02:37.494Z" }, + { url = "https://files.pythonhosted.org/packages/80/0e/f38ba35713edb8d4197ae602e80837d574244ced7fb1b6070b31c29816e0/tiktoken-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:27d457f096f87685195eea0165a1807fae87b97b2161fe8c9b1df5bd74ca6f63", size = 1007877, upload-time = "2025-02-14T06:02:39.516Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/82/9197f77421e2a01373e27a79dd36efdd99e6b4115746ecc553318ecafbf0/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf8ded49cddf825390e36dd1ad35cd49589e8161fdcb52aa25f0583e90a3e01", size = 1140095, upload-time = "2025-02-14T06:02:41.791Z" }, + { url = "https://files.pythonhosted.org/packages/f2/bb/4513da71cac187383541facd0291c4572b03ec23c561de5811781bbd988f/tiktoken-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc156cb314119a8bb9748257a2eaebd5cc0753b6cb491d26694ed42fc7cb3139", size = 1195649, upload-time = "2025-02-14T06:02:43Z" }, + { url = "https://files.pythonhosted.org/packages/fa/5c/74e4c137530dd8504e97e3a41729b1103a4ac29036cbfd3250b11fd29451/tiktoken-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cd69372e8c9dd761f0ab873112aba55a0e3e506332dd9f7522ca466e817b1b7a", size = 1258465, upload-time = "2025-02-14T06:02:45.046Z" }, + { url = "https://files.pythonhosted.org/packages/de/a8/8f499c179ec900783ffe133e9aab10044481679bb9aad78436d239eee716/tiktoken-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ea0edb6f83dc56d794723286215918c1cde03712cbbafa0348b33448faf5b95", size = 894669, upload-time = "2025-02-14T06:02:47.341Z" }, +] + +[[package]] +name = "tokenizers" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123, upload-time = "2025-09-19T09:49:23.424Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", size = 3069318, upload-time = "2025-09-19T09:49:11.848Z" }, + { url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478, upload-time = "2025-09-19T09:49:09.759Z" }, + { url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994, upload-time = "2025-09-19T09:48:56.701Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141, upload-time = "2025-09-19T09:48:59.749Z" }, + { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049, upload-time = "2025-09-19T09:49:05.868Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730, upload-time = "2025-09-19T09:49:01.832Z" }, + { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size = 3412560, upload-time = "2025-09-19T09:49:03.867Z" }, + { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221, upload-time = "2025-09-19T09:49:07.664Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569, upload-time = "2025-09-19T09:49:14.214Z" }, + { url = "https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599, upload-time = "2025-09-19T09:49:16.639Z" }, + { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862, upload-time = "2025-09-19T09:49:19.146Z" }, + { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250, upload-time = "2025-09-19T09:49:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash = "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003, upload-time = "2025-09-19T09:49:27.089Z" }, + { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684, upload-time = "2025-09-19T09:49:24.953Z" }, +] + +[[package]] +name = "tools-module" +version = "0.1.0" +source = { editable = "modules/tools_module" } +dependencies = [ + { name = "common-module" }, + { name = "datasource" }, + { name = "flo-ai" }, + { name = "flo-cloud" }, + { name = "knowledge-base-module" }, + { name = "plugins-module" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, +] + +[package.metadata] +requires-dist = [ + { name = "common-module", editable = "modules/common_module" }, + { name = "datasource", editable = "plugins/datasource" }, + { name = "flo-ai", specifier = ">=1.1.0rc5" }, + { name = "flo-cloud", editable = "packages/flo_cloud" }, + { name = 
"knowledge-base-module", editable = "modules/knowledge_base_module" }, + { name = "plugins-module", editable = "modules/plugins_module" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pytest", specifier = ">=8.3.3,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.24.0,<1.0.0" }, +] + +[[package]] +name = "torch" +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock", marker = "sys_platform != 'darwin'" }, + { name = "fsspec", marker = "sys_platform != 'darwin'" }, + { name = "jinja2", marker = "sys_platform != 'darwin'" }, + { name = "networkx", marker = "sys_platform != 'darwin'" }, + { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cufile-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusparselt-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "setuptools", marker = "python_full_version >= '3.12' and sys_platform != 'darwin'" }, + { name = "sympy", marker = "sys_platform != 'darwin'" }, + { name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "typing-extensions", marker = "sys_platform != 'darwin'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/c4/3e7a3887eba14e815e614db70b3b529112d1513d9dae6f4d43e373360b7f/torch-2.8.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:220a06fd7af8b653c35d359dfe1aaf32f65aa85befa342629f716acb134b9710", size = 102073391, upload-time = "2025-08-06T14:53:20.937Z" }, + { url = "https://files.pythonhosted.org/packages/5a/63/4fdc45a0304536e75a5e1b1bbfb1b56dd0e2743c48ee83ca729f7ce44162/torch-2.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:c12fa219f51a933d5f80eeb3a7a5d0cbe9168c0a14bbb4055f1979431660879b", size = 888063640, upload-time = "2025-08-06T14:55:05.325Z" }, + { url = "https://files.pythonhosted.org/packages/84/57/2f64161769610cf6b1c5ed782bd8a780e18a3c9d48931319f2887fa9d0b1/torch-2.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:8c7ef765e27551b2fbfc0f41bcf270e1292d9bf79f8e0724848b1682be6e80aa", size = 241366752, upload-time = "2025-08-06T14:53:38.692Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/0c/2fd4df0d83a495bb5e54dca4474c4ec5f9c62db185421563deeb5dabf609/torch-2.8.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e2fab4153768d433f8ed9279c8133a114a034a61e77a3a104dcdf54388838705", size = 101906089, upload-time = "2025-08-06T14:53:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/99/a8/6acf48d48838fb8fe480597d98a0668c2beb02ee4755cc136de92a0a956f/torch-2.8.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b2aca0939fb7e4d842561febbd4ffda67a8e958ff725c1c27e244e85e982173c", size = 887913624, upload-time = "2025-08-06T14:56:44.33Z" }, + { url = "https://files.pythonhosted.org/packages/af/8a/5c87f08e3abd825c7dfecef5a0f1d9aa5df5dd0e3fd1fa2f490a8e512402/torch-2.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:2f4ac52f0130275d7517b03a33d2493bab3693c83dcfadf4f81688ea82147d2e", size = 241326087, upload-time = "2025-08-06T14:53:46.503Z" }, + { url = "https://files.pythonhosted.org/packages/10/4e/469ced5a0603245d6a19a556e9053300033f9c5baccf43a3d25ba73e189e/torch-2.8.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:2b2f96814e0345f5a5aed9bf9734efa913678ed19caf6dc2cddb7930672d6128", size = 101936856, upload-time = "2025-08-06T14:54:01.526Z" }, + { url = "https://files.pythonhosted.org/packages/16/82/3948e54c01b2109238357c6f86242e6ecbf0c63a1af46906772902f82057/torch-2.8.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:65616ca8ec6f43245e1f5f296603e33923f4c30f93d65e103d9e50c25b35150b", size = 887922844, upload-time = "2025-08-06T14:55:50.78Z" }, + { url = "https://files.pythonhosted.org/packages/e3/54/941ea0a860f2717d86a811adf0c2cd01b3983bdd460d0803053c4e0b8649/torch-2.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:659df54119ae03e83a800addc125856effda88b016dfc54d9f65215c3975be16", size = 241330968, upload-time = "2025-08-06T14:54:45.293Z" }, + { url = "https://files.pythonhosted.org/packages/15/0e/8a800e093b7f7430dbaefa80075aee9158ec22e4c4fc3c1a66e4fb96cb4f/torch-2.8.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:83c13411a26fac3d101fe8035a6b0476ae606deb8688e904e796a3534c197def", size = 102020139, upload-time = "2025-08-06T14:54:39.047Z" }, + { url = "https://files.pythonhosted.org/packages/4a/15/5e488ca0bc6162c86a33b58642bc577c84ded17c7b72d97e49b5833e2d73/torch-2.8.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:8f0a9d617a66509ded240add3754e462430a6c1fc5589f86c17b433dd808f97a", size = 887990692, upload-time = "2025-08-06T14:56:18.286Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a8/6a04e4b54472fc5dba7ca2341ab219e529f3c07b6941059fbf18dccac31f/torch-2.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a7242b86f42be98ac674b88a4988643b9bc6145437ec8f048fea23f72feb5eca", size = 241603453, upload-time = "2025-08-06T14:55:22.945Z" }, +] + +[[package]] +name = "torchvision" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "pillow" }, + { name = "requests" }, + { name = "torch", marker = "sys_platform != 'darwin'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/23/84/46481327771d4f63feb59dd0d9e1cd6a42e985dbd371965f486a5bf9f323/torchvision-0.16.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:31fdf289bdfb2976f65a14f79f6ddd1ee60113db34622674918e61521c2dc41f", size = 1705744, upload-time = "2023-10-04T17:02:41.079Z" }, + { url = "https://files.pythonhosted.org/packages/dd/48/85ef87f1548620d7c3743cbe135b90d8cf673fc7b8594361917558d765b6/torchvision-0.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:2294a6514a31a6fda562288b28cf6db57877237f4b56ff693262f237a7ed4035", size = 1617096, upload-time = "2023-10-04T17:02:10.682Z" }, + { url = "https://files.pythonhosted.org/packages/08/aa/033a0a1bca4a3b8f81f97709024e053da37cf5c970f3dc5d76233c920b99/torchvision-0.16.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:6a24a1e83e4bc7a31b39ef05d2ca4cd2182e95ff10f525edffe1473f7ce16ca1", size = 6920174, upload-time = "2023-10-04T17:03:04.79Z" }, + { url = "https://files.pythonhosted.org/packages/ab/1a/56e7df23d2ef1526094f7f895f00a2151a67ffa36dfb85d7c24246b25d81/torchvision-0.16.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9ed5f21e5a56e466667c6f9f6f93dba2a75e29921108bd70043eaf8e9ba0a7cc", size = 14076642, upload-time = "2023-10-04T17:02:55.002Z" }, + { url = "https://files.pythonhosted.org/packages/20/ac/ab6f42af83349e679b03c9bb18354740c6b58b17dba329fb408730230584/torchvision-0.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:9ee3d4df7d4a84f883f8ad11fb6510549f40f68dd5469eae601d7e02fb4809b2", size = 1262967, upload-time = "2023-10-04T17:02:32.918Z" }, +] + +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, +] + +[[package]] +name = "transformers" +version = "4.56.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "huggingface-hub" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "regex" }, + { name = "requests" }, + { name = "safetensors" }, + { name = "tokenizers" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e5/82/0bcfddd134cdf53440becb5e738257cc3cf34cf229d63b57bfd288e6579f/transformers-4.56.2.tar.gz", hash = "sha256:5e7c623e2d7494105c726dd10f6f90c2c99a55ebe86eef7233765abd0cb1c529", size = 9844296, upload-time = "2025-09-19T15:16:26.778Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/70/26/2591b48412bde75e33bfd292034103ffe41743cacd03120e3242516cd143/transformers-4.56.2-py3-none-any.whl", hash = "sha256:79c03d0e85b26cb573c109ff9eafa96f3c8d4febfd8a0774e8bba32702dd6dde", size = 11608055, upload-time = "2025-09-19T15:16:23.736Z" }, +] + +[[package]] +name = "triton" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "setuptools", marker = "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/39/43325b3b651d50187e591eefa22e236b2981afcebaefd4f2fc0ea99df191/triton-3.4.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b70f5e6a41e52e48cfc087436c8a28c17ff98db369447bcaff3b887a3ab4467", size = 155531138, upload-time = "2025-07-30T19:58:29.908Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/66/b1eb52839f563623d185f0927eb3530ee4d5ffe9d377cdaf5346b306689e/triton-3.4.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:31c1d84a5c0ec2c0f8e8a072d7fd150cab84a9c239eaddc6706c081bfae4eb04", size = 155560068, upload-time = "2025-07-30T19:58:37.081Z" }, + { url = "https://files.pythonhosted.org/packages/30/7b/0a685684ed5322d2af0bddefed7906674f67974aa88b0fae6e82e3b766f6/triton-3.4.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00be2964616f4c619193cb0d1b29a99bd4b001d7dc333816073f92cf2a8ccdeb", size = 155569223, upload-time = "2025-07-30T19:58:44.017Z" }, + { url = "https://files.pythonhosted.org/packages/20/63/8cb444ad5cdb25d999b7d647abac25af0ee37d292afc009940c05b82dda0/triton-3.4.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7936b18a3499ed62059414d7df563e6c163c5e16c3773678a3ee3d417865035d", size = 155659780, upload-time = "2025-07-30T19:58:51.171Z" }, +] + +[[package]] +name = "twilio" +version = "9.8.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "aiohttp-retry" }, + { name = "pyjwt" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/ab/e8593cd07d1cd3cf76566a32a3bd0515f73ba6cb791dba669d1b989c75bf/twilio-9.8.5.tar.gz", hash = "sha256:b69dc2226294ef579fcdd92b502aef7982a60577110a0799f8c2b739428e1dcf", size = 942213, upload-time = "2025-10-28T10:06:58.114Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/6c/b7d23e45bcfc0d717343b2a1e5478a529a6b8bb4d1a18a2184ab4c1aa014/twilio-9.8.5-py2.py3-none-any.whl", hash = "sha256:7e1d04bb2cd480a97937f1fbea5fe35377925b6f70745baade08bde9cd4fb319", size = 1834872, upload-time = "2025-10-28T10:06:56.406Z" }, +] + +[[package]] +name = "ty" +version = "0.0.1a28" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/10/8b/8a87df1d93ad4e2e88f08f94941b9f9479ccb323100fb52253cecbde8978/ty-0.0.1a28.tar.gz", hash = "sha256:6454f2bc0d5b716aeaba3e32c4585a14a0d6bfc7e90d5aba64539fa33df824c4", size = 4584440, upload-time = "2025-11-26T00:27:09.499Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/24/7a/768f3d9945066a9a44f9ed280e4717409b772fca1ef165e112827abf2ee6/ty-0.0.1a28-py3-none-linux_armv6l.whl", hash = "sha256:0ea28aaaf35176a75ce85da7a4b7f577f3a3319a1eb4d13c0105629e239a7d95", size = 9500811, upload-time = "2025-11-26T00:27:26.134Z" }, + { url = "https://files.pythonhosted.org/packages/bc/cc/d6e4e433bd91043d1eb2ecc7908000585100a5cbdd548d85082e1e07865d/ty-0.0.1a28-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:463f8b6bee5c3d338a535c40764a4f209f5465caecbc9f7358ee2a7f8b2d321e", size = 9286280, upload-time = "2025-11-26T00:27:27.753Z" }, + { url = "https://files.pythonhosted.org/packages/77/68/00e8e7f280fbef2e89df10e6c9ce896dd6716bffc2e8e7ece58503b767e5/ty-0.0.1a28-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7d037ea9f896e6e9b96ca066959e2a7600db0da9e4038f1247c9337af253cc8c", size = 8810453, upload-time = "2025-11-26T00:27:07.812Z" }, + { url = "https://files.pythonhosted.org/packages/10/1b/ef72e26f487272b60156e0f527a5fbc27da799accad3420d01bc08101ca8/ty-0.0.1a28-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad5099ffaa891391733d6fd85bcdd00ad68042a2da4f80a114b9e7044e6f7460", size = 9098344, upload-time = "2025-11-26T00:27:22.531Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/0b/e56c5623c604d20fa26d320a73bc4fb7c2db28e14ba021409c767c4ddfdf/ty-0.0.1a28-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:587652aecb8d238adcb45ae7cd12efd27b9778f74b636cbbe5dcc2e938f9af4e", size = 9303714, upload-time = "2025-11-26T00:26:57.946Z" }, + { url = "https://files.pythonhosted.org/packages/eb/04/61518d3eac0357305e3a06c9a4cedbb49bc9f343d38ba26194c15a81f22e/ty-0.0.1a28-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d9556c87419264ffc3071a249f89d890a29df5d09abd8d216bac850ad2d7ba9", size = 9668395, upload-time = "2025-11-26T00:27:12.893Z" }, + { url = "https://files.pythonhosted.org/packages/fd/01/ef22fc8e3d9415d2ab2def0f562fe6ee7ae28b99dc180acd636486a9f818/ty-0.0.1a28-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7481abc03a0aabf966c9e1cccb18c9edbb7cf01ec011568cd24feb1ab45faef7", size = 10269943, upload-time = "2025-11-26T00:27:02.018Z" }, + { url = "https://files.pythonhosted.org/packages/16/f7/bb94f55c6f3bfc3da543e6b1ec32877e107b2afb8cae3057ae9f5a8f4eaa/ty-0.0.1a28-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fd4926f668b733aeadd09f7d16e63af30cba5438bbba1274f950a1059c8d64", size = 10023310, upload-time = "2025-11-26T00:27:29.523Z" }, + { url = "https://files.pythonhosted.org/packages/9a/58/ebaefa1b27b4aea8156f1b43d6d431afd8061e76e1c96e83dad8a0dcb555/ty-0.0.1a28-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fb119d7db1a064dd74ccedf78bdc5caae30cf5de421dff972a849bcff411269", size = 10034408, upload-time = "2025-11-26T00:27:18.561Z" }, + { url = "https://files.pythonhosted.org/packages/da/66/97be24c8abbcd803dab65cd2b430330e449e4542c0e0396e15fe32f4e2c2/ty-0.0.1a28-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd7f7d744920af9ceaf7fe6db290366abefbcffd7cce54f15e8cef6a86e2df31", size = 9597359, upload-time = "2025-11-26T00:27:03.803Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c8/a7451f1ca4d8ed12c025a5c306e9527bd9269abacdf2b2b8d0ca8bb90a13/ty-0.0.1a28-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c20c6cf7e786ecf6c8f34892240b4b1ae8b1adce52243868aa400c80b7a9bc1d", size = 9069439, upload-time = "2025-11-26T00:27:14.768Z" }, + { url = "https://files.pythonhosted.org/packages/e4/b9/d212887e13f3db925287f6be5addaf37190070956c960c73e22f93509273/ty-0.0.1a28-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:54c94a06c0236dfd249217e28816b6aedfc40e71d5b5131924efa3b095dfcf1a", size = 9332037, upload-time = "2025-11-26T00:27:00.138Z" }, + { url = "https://files.pythonhosted.org/packages/1d/14/3dc72136a72d354cdc93b509c35f4a426869879fa9e0346f1cd7d2bba3f7/ty-0.0.1a28-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1a15eb2535229ab65aaafbe3fb22c3d289c4e34cda92fb748815573b6d52fe3a", size = 9428504, upload-time = "2025-11-26T00:27:16.541Z" }, + { url = "https://files.pythonhosted.org/packages/d5/65/e15984e245fe330dfdc665cc7c492c633149ff97b3f95af32bdd08b74fdb/ty-0.0.1a28-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6c2ebd5314707cd26aabe77b1d664e597b7b29a8d07fed5091f986ebdaa261a9", size = 9720869, upload-time = "2025-11-26T00:27:20.721Z" }, + { url = "https://files.pythonhosted.org/packages/a5/91/5826e5f78fc5ee685b34a1904cb5da8b3ab83d4c04e5574c4542728c2422/ty-0.0.1a28-py3-none-win32.whl", hash = "sha256:ae10abd8575d28744d905979632040222581ba364281abf75baf8f269a10ffc3", size = 8950581, upload-time = "2025-11-26T00:27:24.346Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/5e/6380d565dfb286634facbe71fb389dc9a8d4379f18d55a6feac392bd5755/ty-0.0.1a28-py3-none-win_amd64.whl", hash = "sha256:44ef82c1169c050ad9e91b2d76251be097ddd163719735cf7e5a978065f6b87c", size = 9789598, upload-time = "2025-11-26T00:27:05.549Z" }, + { url = "https://files.pythonhosted.org/packages/55/48/fec040641bd4c9599fecc0bb74e697c79ea3fa234b25b04b68823aca55a5/ty-0.0.1a28-py3-none-win_arm64.whl", hash = "sha256:051c1d43df50366fb8e795ae52af8f2015b79d176dbb82cdd45668074847ddf3", size = 9278405, upload-time = "2025-11-26T00:27:11.066Z" }, +] + +[[package]] +name = "typer" +version = "0.20.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8f/28/7c85c8032b91dbe79725b6f17d2fffc595dff06a35c7a30a37bef73a1ab4/typer-0.20.0.tar.gz", hash = "sha256:1aaf6494031793e4876fb0bacfa6a912b551cf43c1e63c800df8b1a866720c37", size = 106492, upload-time = "2025-10-20T17:03:49.445Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/64/7713ffe4b5983314e9d436a90d5bd4f63b6054e2aca783a3cfc44cb95bbf/typer-0.20.0-py3-none-any.whl", hash = "sha256:5b463df6793ec1dca6213a3cf4c0f03bc6e322ac5e16e13ddd622a889489784a", size = 47028, upload-time = "2025-10-20T17:03:47.617Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825, upload-time = "2023-05-24T20:25:47.612Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827, upload-time = "2023-05-24T20:25:45.287Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, +] + +[[package]] +name = "tzdata" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, +] + +[[package]] +name = "tzlocal" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761, upload-time = "2025-03-05T21:17:41.549Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026, upload-time = "2025-03-05T21:17:39.857Z" }, +] + +[[package]] +name = "ujson" +version = "5.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/d9/3f17e3c5773fb4941c68d9a37a47b1a79c9649d6c56aefbed87cc409d18a/ujson-5.11.0.tar.gz", hash = "sha256:e204ae6f909f099ba6b6b942131cee359ddda2b6e4ea39c12eb8b991fe2010e0", size = 7156583, upload-time = "2025-08-20T11:57:02.452Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/ea/80346b826349d60ca4d612a47cdf3533694e49b45e9d1c07071bb867a184/ujson-5.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d7c46cb0fe5e7056b9acb748a4c35aa1b428025853032540bb7e41f46767321f", size = 55248, upload-time = "2025-08-20T11:55:19.033Z" }, + { url = "https://files.pythonhosted.org/packages/57/df/b53e747562c89515e18156513cc7c8ced2e5e3fd6c654acaa8752ffd7cd9/ujson-5.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8951bb7a505ab2a700e26f691bdfacf395bc7e3111e3416d325b513eea03a58", size = 53156, upload-time = "2025-08-20T11:55:20.174Z" }, + { url = "https://files.pythonhosted.org/packages/41/b8/ab67ec8c01b8a3721fd13e5cb9d85ab2a6066a3a5e9148d661a6870d6293/ujson-5.11.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952c0be400229940248c0f5356514123d428cba1946af6fa2bbd7503395fef26", size = 57657, upload-time = "2025-08-20T11:55:21.296Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c7/fb84f27cd80a2c7e2d3c6012367aecade0da936790429801803fa8d4bffc/ujson-5.11.0-cp311-cp311-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:94fcae844f1e302f6f8095c5d1c45a2f0bfb928cccf9f1b99e3ace634b980a2a", size = 59779, upload-time = "2025-08-20T11:55:22.772Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/7c/48706f7c1e917ecb97ddcfb7b1d756040b86ed38290e28579d63bd3fcc48/ujson-5.11.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7e0ec1646db172beb8d3df4c32a9d78015e671d2000af548252769e33079d9a6", size = 57284, upload-time = "2025-08-20T11:55:24.01Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ce/48877c6eb4afddfd6bd1db6be34456538c07ca2d6ed233d3f6c6efc2efe8/ujson-5.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:da473b23e3a54448b008d33f742bcd6d5fb2a897e42d1fc6e7bf306ea5d18b1b", size = 1036395, upload-time = "2025-08-20T11:55:25.725Z" }, + { url = "https://files.pythonhosted.org/packages/8b/7a/2c20dc97ad70cd7c31ad0596ba8e2cf8794d77191ba4d1e0bded69865477/ujson-5.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:aa6b3d4f1c0d3f82930f4cbd7fe46d905a4a9205a7c13279789c1263faf06dba", size = 1195731, upload-time = "2025-08-20T11:55:27.915Z" }, + { url = "https://files.pythonhosted.org/packages/15/f5/ca454f2f6a2c840394b6f162fff2801450803f4ff56c7af8ce37640b8a2a/ujson-5.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4843f3ab4fe1cc596bb7e02228ef4c25d35b4bb0809d6a260852a4bfcab37ba3", size = 1088710, upload-time = "2025-08-20T11:55:29.426Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d3/9ba310e07969bc9906eb7548731e33a0f448b122ad9705fed699c9b29345/ujson-5.11.0-cp311-cp311-win32.whl", hash = "sha256:e979fbc469a7f77f04ec2f4e853ba00c441bf2b06720aa259f0f720561335e34", size = 39648, upload-time = "2025-08-20T11:55:31.194Z" }, + { url = "https://files.pythonhosted.org/packages/57/f7/da05b4a8819f1360be9e71fb20182f0bb3ec611a36c3f213f4d20709e099/ujson-5.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:683f57f0dd3acdd7d9aff1de0528d603aafcb0e6d126e3dc7ce8b020a28f5d01", size = 43717, upload-time = "2025-08-20T11:55:32.241Z" }, + { url = "https://files.pythonhosted.org/packages/9a/cc/f3f9ac0f24f00a623a48d97dc3814df5c2dc368cfb00031aa4141527a24b/ujson-5.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:7855ccea3f8dad5e66d8445d754fc1cf80265a4272b5f8059ebc7ec29b8d0835", size = 38402, upload-time = "2025-08-20T11:55:33.641Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ef/a9cb1fce38f699123ff012161599fb9f2ff3f8d482b4b18c43a2dc35073f/ujson-5.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7895f0d2d53bd6aea11743bd56e3cb82d729980636cd0ed9b89418bf66591702", size = 55434, upload-time = "2025-08-20T11:55:34.987Z" }, + { url = "https://files.pythonhosted.org/packages/b1/05/dba51a00eb30bd947791b173766cbed3492269c150a7771d2750000c965f/ujson-5.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12b5e7e22a1fe01058000d1b317d3b65cc3daf61bd2ea7a2b76721fe160fa74d", size = 53190, upload-time = "2025-08-20T11:55:36.384Z" }, + { url = "https://files.pythonhosted.org/packages/03/3c/fd11a224f73fbffa299fb9644e425f38b38b30231f7923a088dd513aabb4/ujson-5.11.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0180a480a7d099082501cad1fe85252e4d4bf926b40960fb3d9e87a3a6fbbc80", size = 57600, upload-time = "2025-08-20T11:55:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/405103cae24899df688a3431c776e00528bd4799e7d68820e7ebcf824f92/ujson-5.11.0-cp312-cp312-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:fa79fdb47701942c2132a9dd2297a1a85941d966d8c87bfd9e29b0cf423f26cc", size = 59791, upload-time = "2025-08-20T11:55:38.877Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/7b/2dcbc2bbfdbf68f2368fb21ab0f6735e872290bb604c75f6e06b81edcb3f/ujson-5.11.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8254e858437c00f17cb72e7a644fc42dad0ebb21ea981b71df6e84b1072aaa7c", size = 57356, upload-time = "2025-08-20T11:55:40.036Z" }, + { url = "https://files.pythonhosted.org/packages/d1/71/fea2ca18986a366c750767b694430d5ded6b20b6985fddca72f74af38a4c/ujson-5.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1aa8a2ab482f09f6c10fba37112af5f957689a79ea598399c85009f2f29898b5", size = 1036313, upload-time = "2025-08-20T11:55:41.408Z" }, + { url = "https://files.pythonhosted.org/packages/a3/bb/d4220bd7532eac6288d8115db51710fa2d7d271250797b0bfba9f1e755af/ujson-5.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a638425d3c6eed0318df663df44480f4a40dc87cc7c6da44d221418312f6413b", size = 1195782, upload-time = "2025-08-20T11:55:43.357Z" }, + { url = "https://files.pythonhosted.org/packages/80/47/226e540aa38878ce1194454385701d82df538ccb5ff8db2cf1641dde849a/ujson-5.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7e3cff632c1d78023b15f7e3a81c3745cd3f94c044d1e8fa8efbd6b161997bbc", size = 1088817, upload-time = "2025-08-20T11:55:45.262Z" }, + { url = "https://files.pythonhosted.org/packages/7e/81/546042f0b23c9040d61d46ea5ca76f0cc5e0d399180ddfb2ae976ebff5b5/ujson-5.11.0-cp312-cp312-win32.whl", hash = "sha256:be6b0eaf92cae8cdee4d4c9e074bde43ef1c590ed5ba037ea26c9632fb479c88", size = 39757, upload-time = "2025-08-20T11:55:46.522Z" }, + { url = "https://files.pythonhosted.org/packages/44/1b/27c05dc8c9728f44875d74b5bfa948ce91f6c33349232619279f35c6e817/ujson-5.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:b7b136cc6abc7619124fd897ef75f8e63105298b5ca9bdf43ebd0e1fa0ee105f", size = 43859, upload-time = "2025-08-20T11:55:47.987Z" }, + { url = "https://files.pythonhosted.org/packages/22/2d/37b6557c97c3409c202c838aa9c960ca3896843b4295c4b7bb2bbd260664/ujson-5.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:6cd2df62f24c506a0ba322d5e4fe4466d47a9467b57e881ee15a31f7ecf68ff6", size = 38361, upload-time = "2025-08-20T11:55:49.122Z" }, + { url = "https://files.pythonhosted.org/packages/1c/ec/2de9dd371d52c377abc05d2b725645326c4562fc87296a8907c7bcdf2db7/ujson-5.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:109f59885041b14ee9569bf0bb3f98579c3fa0652317b355669939e5fc5ede53", size = 55435, upload-time = "2025-08-20T11:55:50.243Z" }, + { url = "https://files.pythonhosted.org/packages/5b/a4/f611f816eac3a581d8a4372f6967c3ed41eddbae4008d1d77f223f1a4e0a/ujson-5.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a31c6b8004438e8c20fc55ac1c0e07dad42941db24176fe9acf2815971f8e752", size = 53193, upload-time = "2025-08-20T11:55:51.373Z" }, + { url = "https://files.pythonhosted.org/packages/e9/c5/c161940967184de96f5cbbbcce45b562a4bf851d60f4c677704b1770136d/ujson-5.11.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78c684fb21255b9b90320ba7e199780f653e03f6c2528663768965f4126a5b50", size = 57603, upload-time = "2025-08-20T11:55:52.583Z" }, + { url = "https://files.pythonhosted.org/packages/2b/d6/c7b2444238f5b2e2d0e3dab300b9ddc3606e4b1f0e4bed5a48157cebc792/ujson-5.11.0-cp313-cp313-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:4c9f5d6a27d035dd90a146f7761c2272cf7103de5127c9ab9c4cd39ea61e878a", size = 59794, upload-time = "2025-08-20T11:55:53.69Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/a3/292551f936d3d02d9af148f53e1bc04306b00a7cf1fcbb86fa0d1c887242/ujson-5.11.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:837da4d27fed5fdc1b630bd18f519744b23a0b5ada1bbde1a36ba463f2900c03", size = 57363, upload-time = "2025-08-20T11:55:54.843Z" }, + { url = "https://files.pythonhosted.org/packages/90/a6/82cfa70448831b1a9e73f882225980b5c689bf539ec6400b31656a60ea46/ujson-5.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:787aff4a84da301b7f3bac09bc696e2e5670df829c6f8ecf39916b4e7e24e701", size = 1036311, upload-time = "2025-08-20T11:55:56.197Z" }, + { url = "https://files.pythonhosted.org/packages/84/5c/96e2266be50f21e9b27acaee8ca8f23ea0b85cb998c33d4f53147687839b/ujson-5.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6dd703c3e86dc6f7044c5ac0b3ae079ed96bf297974598116aa5fb7f655c3a60", size = 1195783, upload-time = "2025-08-20T11:55:58.081Z" }, + { url = "https://files.pythonhosted.org/packages/8d/20/78abe3d808cf3bb3e76f71fca46cd208317bf461c905d79f0d26b9df20f1/ujson-5.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3772e4fe6b0c1e025ba3c50841a0ca4786825a4894c8411bf8d3afe3a8061328", size = 1088822, upload-time = "2025-08-20T11:55:59.469Z" }, + { url = "https://files.pythonhosted.org/packages/d8/50/8856e24bec5e2fc7f775d867aeb7a3f137359356200ac44658f1f2c834b2/ujson-5.11.0-cp313-cp313-win32.whl", hash = "sha256:8fa2af7c1459204b7a42e98263b069bd535ea0cd978b4d6982f35af5a04a4241", size = 39753, upload-time = "2025-08-20T11:56:01.345Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d8/1baee0f4179a4d0f5ce086832147b6cc9b7731c24ca08e14a3fdb8d39c32/ujson-5.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:34032aeca4510a7c7102bd5933f59a37f63891f30a0706fb46487ab6f0edf8f0", size = 43866, upload-time = "2025-08-20T11:56:02.552Z" }, + { url = "https://files.pythonhosted.org/packages/a9/8c/6d85ef5be82c6d66adced3ec5ef23353ed710a11f70b0b6a836878396334/ujson-5.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:ce076f2df2e1aa62b685086fbad67f2b1d3048369664b4cdccc50707325401f9", size = 38363, upload-time = "2025-08-20T11:56:03.688Z" }, + { url = "https://files.pythonhosted.org/packages/28/08/4518146f4984d112764b1dfa6fb7bad691c44a401adadaa5e23ccd930053/ujson-5.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:65724738c73645db88f70ba1f2e6fb678f913281804d5da2fd02c8c5839af302", size = 55462, upload-time = "2025-08-20T11:56:04.873Z" }, + { url = "https://files.pythonhosted.org/packages/29/37/2107b9a62168867a692654d8766b81bd2fd1e1ba13e2ec90555861e02b0c/ujson-5.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29113c003ca33ab71b1b480bde952fbab2a0b6b03a4ee4c3d71687cdcbd1a29d", size = 53246, upload-time = "2025-08-20T11:56:06.054Z" }, + { url = "https://files.pythonhosted.org/packages/9b/f8/25583c70f83788edbe3ca62ce6c1b79eff465d78dec5eb2b2b56b3e98b33/ujson-5.11.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c44c703842024d796b4c78542a6fcd5c3cb948b9fc2a73ee65b9c86a22ee3638", size = 57631, upload-time = "2025-08-20T11:56:07.374Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ca/19b3a632933a09d696f10dc1b0dfa1d692e65ad507d12340116ce4f67967/ujson-5.11.0-cp314-cp314-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:e750c436fb90edf85585f5c62a35b35082502383840962c6983403d1bd96a02c", size = 59877, upload-time = "2025-08-20T11:56:08.534Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/7a/4572af5324ad4b2bfdd2321e898a527050290147b4ea337a79a0e4e87ec7/ujson-5.11.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f278b31a7c52eb0947b2db55a5133fbc46b6f0ef49972cd1a80843b72e135aba", size = 57363, upload-time = "2025-08-20T11:56:09.758Z" }, + { url = "https://files.pythonhosted.org/packages/7b/71/a2b8c19cf4e1efe53cf439cdf7198ac60ae15471d2f1040b490c1f0f831f/ujson-5.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ab2cb8351d976e788669c8281465d44d4e94413718af497b4e7342d7b2f78018", size = 1036394, upload-time = "2025-08-20T11:56:11.168Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3e/7b98668cba3bb3735929c31b999b374ebc02c19dfa98dfebaeeb5c8597ca/ujson-5.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:090b4d11b380ae25453100b722d0609d5051ffe98f80ec52853ccf8249dfd840", size = 1195837, upload-time = "2025-08-20T11:56:12.6Z" }, + { url = "https://files.pythonhosted.org/packages/a1/ea/8870f208c20b43571a5c409ebb2fe9b9dba5f494e9e60f9314ac01ea8f78/ujson-5.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:80017e870d882d5517d28995b62e4e518a894f932f1e242cbc802a2fd64d365c", size = 1088837, upload-time = "2025-08-20T11:56:14.15Z" }, + { url = "https://files.pythonhosted.org/packages/63/b6/c0e6607e37fa47929920a685a968c6b990a802dec65e9c5181e97845985d/ujson-5.11.0-cp314-cp314-win32.whl", hash = "sha256:1d663b96eb34c93392e9caae19c099ec4133ba21654b081956613327f0e973ac", size = 41022, upload-time = "2025-08-20T11:56:15.509Z" }, + { url = "https://files.pythonhosted.org/packages/4e/56/f4fe86b4c9000affd63e9219e59b222dc48b01c534533093e798bf617a7e/ujson-5.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:849e65b696f0d242833f1df4182096cedc50d414215d1371fca85c541fbff629", size = 45111, upload-time = "2025-08-20T11:56:16.597Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f3/669437f0280308db4783b12a6d88c00730b394327d8334cc7a32ef218e64/ujson-5.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:e73df8648c9470af2b6a6bf5250d4744ad2cf3d774dcf8c6e31f018bdd04d764", size = 39682, upload-time = "2025-08-20T11:56:17.763Z" }, + { url = "https://files.pythonhosted.org/packages/6e/cd/e9809b064a89fe5c4184649adeb13c1b98652db3f8518980b04227358574/ujson-5.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:de6e88f62796372fba1de973c11138f197d3e0e1d80bcb2b8aae1e826096d433", size = 55759, upload-time = "2025-08-20T11:56:18.882Z" }, + { url = "https://files.pythonhosted.org/packages/1b/be/ae26a6321179ebbb3a2e2685b9007c71bcda41ad7a77bbbe164005e956fc/ujson-5.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:49e56ef8066f11b80d620985ae36869a3ff7e4b74c3b6129182ec5d1df0255f3", size = 53634, upload-time = "2025-08-20T11:56:20.012Z" }, + { url = "https://files.pythonhosted.org/packages/ae/e9/fb4a220ee6939db099f4cfeeae796ecb91e7584ad4d445d4ca7f994a9135/ujson-5.11.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1a325fd2c3a056cf6c8e023f74a0c478dd282a93141356ae7f16d5309f5ff823", size = 58547, upload-time = "2025-08-20T11:56:21.175Z" }, + { url = "https://files.pythonhosted.org/packages/bd/f8/fc4b952b8f5fea09ea3397a0bd0ad019e474b204cabcb947cead5d4d1ffc/ujson-5.11.0-cp314-cp314t-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:a0af6574fc1d9d53f4ff371f58c96673e6d988ed2b5bf666a6143c782fa007e9", size = 60489, upload-time = "2025-08-20T11:56:22.342Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/e5/af5491dfda4f8b77e24cf3da68ee0d1552f99a13e5c622f4cef1380925c3/ujson-5.11.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10f29e71ecf4ecd93a6610bd8efa8e7b6467454a363c3d6416db65de883eb076", size = 58035, upload-time = "2025-08-20T11:56:23.92Z" }, + { url = "https://files.pythonhosted.org/packages/c4/09/0945349dd41f25cc8c38d78ace49f14c5052c5bbb7257d2f466fa7bdb533/ujson-5.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1a0a9b76a89827a592656fe12e000cf4f12da9692f51a841a4a07aa4c7ecc41c", size = 1037212, upload-time = "2025-08-20T11:56:25.274Z" }, + { url = "https://files.pythonhosted.org/packages/49/44/8e04496acb3d5a1cbee3a54828d9652f67a37523efa3d3b18a347339680a/ujson-5.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b16930f6a0753cdc7d637b33b4e8f10d5e351e1fb83872ba6375f1e87be39746", size = 1196500, upload-time = "2025-08-20T11:56:27.517Z" }, + { url = "https://files.pythonhosted.org/packages/64/ae/4bc825860d679a0f208a19af2f39206dfd804ace2403330fdc3170334a2f/ujson-5.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:04c41afc195fd477a59db3a84d5b83a871bd648ef371cf8c6f43072d89144eef", size = 1089487, upload-time = "2025-08-20T11:56:29.07Z" }, + { url = "https://files.pythonhosted.org/packages/30/ed/5a057199fb0a5deabe0957073a1c1c1c02a3e99476cd03daee98ea21fa57/ujson-5.11.0-cp314-cp314t-win32.whl", hash = "sha256:aa6d7a5e09217ff93234e050e3e380da62b084e26b9f2e277d2606406a2fc2e5", size = 41859, upload-time = "2025-08-20T11:56:30.495Z" }, + { url = "https://files.pythonhosted.org/packages/aa/03/b19c6176bdf1dc13ed84b886e99677a52764861b6cc023d5e7b6ebda249d/ujson-5.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:48055e1061c1bb1f79e75b4ac39e821f3f35a9b82de17fce92c3140149009bec", size = 46183, upload-time = "2025-08-20T11:56:31.574Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ca/a0413a3874b2dc1708b8796ca895bf363292f9c70b2e8ca482b7dbc0259d/ujson-5.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:1194b943e951092db611011cb8dbdb6cf94a3b816ed07906e14d3bc6ce0e90ab", size = 40264, upload-time = "2025-08-20T11:56:32.773Z" }, + { url = "https://files.pythonhosted.org/packages/50/17/30275aa2933430d8c0c4ead951cc4fdb922f575a349aa0b48a6f35449e97/ujson-5.11.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:abae0fb58cc820092a0e9e8ba0051ac4583958495bfa5262a12f628249e3b362", size = 51206, upload-time = "2025-08-20T11:56:48.797Z" }, + { url = "https://files.pythonhosted.org/packages/c3/15/42b3924258eac2551f8f33fa4e35da20a06a53857ccf3d4deb5e5d7c0b6c/ujson-5.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fac6c0649d6b7c3682a0a6e18d3de6857977378dce8d419f57a0b20e3d775b39", size = 48907, upload-time = "2025-08-20T11:56:50.136Z" }, + { url = "https://files.pythonhosted.org/packages/94/7e/0519ff7955aba581d1fe1fb1ca0e452471250455d182f686db5ac9e46119/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b42c115c7c6012506e8168315150d1e3f76e7ba0f4f95616f4ee599a1372bbc", size = 50319, upload-time = "2025-08-20T11:56:51.63Z" }, + { url = "https://files.pythonhosted.org/packages/74/cf/209d90506b7d6c5873f82c5a226d7aad1a1da153364e9ebf61eff0740c33/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:86baf341d90b566d61a394869ce77188cc8668f76d7bb2c311d77a00f4bdf844", size = 56584, upload-time = "2025-08-20T11:56:52.89Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/97/bd939bb76943cb0e1d2b692d7e68629f51c711ef60425fa5bb6968037ecd/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4598bf3965fc1a936bd84034312bcbe00ba87880ef1ee33e33c1e88f2c398b49", size = 51588, upload-time = "2025-08-20T11:56:54.054Z" }, + { url = "https://files.pythonhosted.org/packages/52/5b/8c5e33228f7f83f05719964db59f3f9f276d272dc43752fa3bbf0df53e7b/ujson-5.11.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:416389ec19ef5f2013592f791486bef712ebce0cd59299bf9df1ba40bb2f6e04", size = 43835, upload-time = "2025-08-20T11:56:55.237Z" }, +] + +[[package]] +name = "uritemplate" +version = "4.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/60/f174043244c5306c9988380d2cb10009f91563fc4b31293d27e17201af56/uritemplate-4.2.0.tar.gz", hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e", size = 33267, upload-time = "2025-06-02T15:12:06.318Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/99/3ae339466c9183ea5b8ae87b34c0b897eda475d2aec2307cae60e5cd4f29/uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686", size = 11488, upload-time = "2025-06-02T15:12:03.405Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "user-management-module" +version = "0.0.1" +source = { editable = "modules/user_management_module" } +dependencies = [ + { name = "auth-module" }, + { name = "authlib" }, + { name = "bcrypt" }, + { name = "db-repo-module" }, + { name = "dependency-injector" }, + { name = "fastapi" }, + { name = "google-api-python-client" }, + { name = "google-auth" }, + { name = "google-auth-httplib2" }, + { name = "pydantic", extra = ["email"] }, +] + +[package.dev-dependencies] +dev = [ + { name = "asyncpg" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "testing-postgresql" }, +] + +[package.metadata] +requires-dist = [ + { name = "auth-module", editable = "modules/auth_module" }, + { name = "authlib", specifier = ">=1.3.2,<2.0.0" }, + { name = "bcrypt", specifier = ">=4.2.1,<5.0.0" }, + { name = "db-repo-module", editable = "modules/db_repo_module" }, + { name = "dependency-injector", specifier = ">=4.42.0,<5.0.0" }, + { name = "fastapi", specifier = ">=0.115.2,<1.0.0" }, + { name = "google-api-python-client", specifier = ">=2.0.0,<3.0.0" }, + { name = "google-auth", specifier = ">=2.0.0,<3.0.0" }, + { name = "google-auth-httplib2", specifier = ">=0.2.0,<1.0.0" }, + { name = "pydantic", extras = ["email"], specifier = ">=2.9.2,<3.0.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "asyncpg", specifier = ">=0.30.0,<1.0.0" }, + { name = "pytest", specifier = ">=8.3.4,<9.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.24.0,<1.0.0" }, + { 
name = "testing-postgresql", specifier = ">=1.3.0,<2.0.0" }, +] + +[[package]] +name = "uvicorn" +version = "0.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = "2025-09-23T13:33:47.486Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, +] + +[[package]] +name = "uvloop" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/d5/69900f7883235562f1f50d8184bb7dd84a2fb61e9ec63f3782546fdbd057/uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9", size = 1352420, upload-time = "2025-10-16T22:16:21.187Z" }, + { url = "https://files.pythonhosted.org/packages/a8/73/c4e271b3bce59724e291465cc936c37758886a4868787da0278b3b56b905/uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77", size = 748677, upload-time = "2025-10-16T22:16:22.558Z" }, + { url = "https://files.pythonhosted.org/packages/86/94/9fb7fad2f824d25f8ecac0d70b94d0d48107ad5ece03769a9c543444f78a/uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21", size = 3753819, upload-time = "2025-10-16T22:16:23.903Z" }, + { url = "https://files.pythonhosted.org/packages/74/4f/256aca690709e9b008b7108bc85fba619a2bc37c6d80743d18abad16ee09/uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702", size = 3804529, upload-time = "2025-10-16T22:16:25.246Z" }, + { url = "https://files.pythonhosted.org/packages/7f/74/03c05ae4737e871923d21a76fe28b6aad57f5c03b6e6bfcfa5ad616013e4/uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733", size = 3621267, upload-time = "2025-10-16T22:16:26.819Z" }, + { url = "https://files.pythonhosted.org/packages/75/be/f8e590fe61d18b4a92070905497aec4c0e64ae1761498cad09023f3f4b3e/uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473", size = 3723105, upload-time = 
"2025-10-16T22:16:28.252Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" }, + { url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" }, + { url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413, upload-time = "2025-10-16T22:16:31.644Z" }, + { url = "https://files.pythonhosted.org/packages/5f/6f/e62b4dfc7ad6518e7eff2516f680d02a0f6eb62c0c212e152ca708a0085e/uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4", size = 4426307, upload-time = "2025-10-16T22:16:32.917Z" }, + { url = "https://files.pythonhosted.org/packages/90/60/97362554ac21e20e81bcef1150cb2a7e4ffdaf8ea1e5b2e8bf7a053caa18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2", size = 4131970, upload-time = "2025-10-16T22:16:34.015Z" }, + { url = "https://files.pythonhosted.org/packages/99/39/6b3f7d234ba3964c428a6e40006340f53ba37993f46ed6e111c6e9141d18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0", size = 4296343, upload-time = "2025-10-16T22:16:35.149Z" }, + { url = "https://files.pythonhosted.org/packages/89/8c/182a2a593195bfd39842ea68ebc084e20c850806117213f5a299dfc513d9/uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705", size = 1358611, upload-time = "2025-10-16T22:16:36.833Z" }, + { url = "https://files.pythonhosted.org/packages/d2/14/e301ee96a6dc95224b6f1162cd3312f6d1217be3907b79173b06785f2fe7/uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8", size = 751811, upload-time = "2025-10-16T22:16:38.275Z" }, + { url = "https://files.pythonhosted.org/packages/b7/02/654426ce265ac19e2980bfd9ea6590ca96a56f10c76e63801a2df01c0486/uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d", size = 4288562, upload-time = "2025-10-16T22:16:39.375Z" }, + { url = "https://files.pythonhosted.org/packages/15/c0/0be24758891ef825f2065cd5db8741aaddabe3e248ee6acc5e8a80f04005/uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e", size = 4366890, upload-time = "2025-10-16T22:16:40.547Z" }, + { url = "https://files.pythonhosted.org/packages/d2/53/8369e5219a5855869bcee5f4d317f6da0e2c669aecf0ef7d371e3d084449/uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e", size = 4119472, upload-time = "2025-10-16T22:16:41.694Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ba/d69adbe699b768f6b29a5eec7b47dd610bd17a69de51b251126a801369ea/uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad", size = 4239051, upload-time = "2025-10-16T22:16:43.224Z" }, + { url = "https://files.pythonhosted.org/packages/90/cd/b62bdeaa429758aee8de8b00ac0dd26593a9de93d302bff3d21439e9791d/uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142", size = 1362067, upload-time = "2025-10-16T22:16:44.503Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f8/a132124dfda0777e489ca86732e85e69afcd1ff7686647000050ba670689/uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74", size = 752423, upload-time = "2025-10-16T22:16:45.968Z" }, + { url = "https://files.pythonhosted.org/packages/a3/94/94af78c156f88da4b3a733773ad5ba0b164393e357cc4bd0ab2e2677a7d6/uvloop-0.22.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35", size = 4272437, upload-time = "2025-10-16T22:16:47.451Z" }, + { url = "https://files.pythonhosted.org/packages/b5/35/60249e9fd07b32c665192cec7af29e06c7cd96fa1d08b84f012a56a0b38e/uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25", size = 4292101, upload-time = "2025-10-16T22:16:49.318Z" }, + { url = "https://files.pythonhosted.org/packages/02/62/67d382dfcb25d0a98ce73c11ed1a6fba5037a1a1d533dcbb7cab033a2636/uvloop-0.22.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6", size = 4114158, upload-time = "2025-10-16T22:16:50.517Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/f1171b4a882a5d13c8b7576f348acfe6074d72eaf52cccef752f748d4a9f/uvloop-0.22.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079", size = 4177360, upload-time = "2025-10-16T22:16:52.646Z" }, + { url = "https://files.pythonhosted.org/packages/79/7b/b01414f31546caf0919da80ad57cbfe24c56b151d12af68cee1b04922ca8/uvloop-0.22.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289", size = 1454790, upload-time = "2025-10-16T22:16:54.355Z" }, + { url = "https://files.pythonhosted.org/packages/d4/31/0bb232318dd838cad3fa8fb0c68c8b40e1145b32025581975e18b11fab40/uvloop-0.22.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3", size = 796783, upload-time = "2025-10-16T22:16:55.906Z" }, + { url = "https://files.pythonhosted.org/packages/42/38/c9b09f3271a7a723a5de69f8e237ab8e7803183131bc57c890db0b6bb872/uvloop-0.22.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c", size = 4647548, upload-time = "2025-10-16T22:16:57.008Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/37/945b4ca0ac27e3dc4952642d4c900edd030b3da6c9634875af6e13ae80e5/uvloop-0.22.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21", size = 4467065, upload-time = "2025-10-16T22:16:58.206Z" }, + { url = "https://files.pythonhosted.org/packages/97/cc/48d232f33d60e2e2e0b42f4e73455b146b76ebe216487e862700457fbf3c/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88", size = 4328384, upload-time = "2025-10-16T22:16:59.36Z" }, + { url = "https://files.pythonhosted.org/packages/e4/16/c1fd27e9549f3c4baf1dc9c20c456cd2f822dbf8de9f463824b0c0357e06/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e", size = 4296730, upload-time = "2025-10-16T22:17:00.744Z" }, +] + +[[package]] +name = "virtualenv" +version = "20.34.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/14/37fcdba2808a6c615681cd216fecae00413c9dab44fb2e57805ecf3eaee3/virtualenv-20.34.0.tar.gz", hash = "sha256:44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a", size = 6003808, upload-time = "2025-08-13T14:24:07.464Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/06/04c8e804f813cf972e3262f3f8584c232de64f0cde9f703b46cf53a45090/virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026", size = 5983279, upload-time = "2025-08-13T14:24:05.111Z" }, +] + +[[package]] +name = "voice-agents-module" +version = "0.1.0" +source = { editable = "modules/voice_agents_module" } +dependencies = [ + { name = "common-module" }, + { name = "db-repo-module" }, + { name = "flo-cloud" }, + { name = "httpx" }, + { name = "twilio" }, +] + +[package.metadata] +requires-dist = [ + { name = "common-module", editable = "modules/common_module" }, + { name = "db-repo-module", editable = "modules/db_repo_module" }, + { name = "flo-cloud", editable = "packages/flo_cloud" }, + { name = "httpx", specifier = ">=0.27.0,<1.0.0" }, + { name = "twilio" }, +] + +[[package]] +name = "wait-for2" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/7c/ea09d6a11990a8aa3ceac206fb7ea82366ea2c200caa87966611e0e18597/wait_for2-0.4.1.tar.gz", hash = "sha256:7f415415d21845c441391d6b4abe68f5959d2c0fbe927c2f61be28a297bc2acb", size = 17519, upload-time = "2025-06-13T19:45:00.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/56/0f88040567af7ff376ec9eaabe18fd980a4f5089d3bf8c7a32598ef06b8d/wait_for2-0.4.1-py3-none-any.whl", hash = "sha256:c694503e8c7420929e8a86bcffd9b00d55acaec2c14223a2b1e92bdc2ebf2154", size = 10985, upload-time = "2025-06-13T19:44:58.82Z" }, +] + +[[package]] +name = "watchfiles" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" }, + { url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" }, + { url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" }, + { url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" }, + { url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = "2025-10-14T15:04:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473, upload-time = 
"2025-10-14T15:04:43.624Z" }, + { url = "https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598, upload-time = "2025-10-14T15:04:44.516Z" }, + { url = "https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210, upload-time = "2025-10-14T15:04:45.883Z" }, + { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, + { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" }, + { url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" }, + { url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" }, + { url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 
630196, upload-time = "2025-10-14T15:04:56.22Z" }, + { url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = "2025-10-14T15:04:57.521Z" }, + { url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" }, + { url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" }, + { url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" }, + { url = "https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" }, + { url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" }, + { url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" }, + { url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = 
"2025-10-14T15:05:07.906Z" }, + { url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" }, + { url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" }, + { url = "https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/95/9c/8ed97d4bba5db6fdcdb2b298d3898f2dd5c20f6b73aee04eabe56c59677e/watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0", size = 272056, upload-time = "2025-10-14T15:05:12.156Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/c14e28429f744a260d8ceae18bf58c1d5fa56b50d006a7a9f80e1882cb0d/watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42", size = 288162, upload-time = "2025-10-14T15:05:13.208Z" }, + { url = "https://files.pythonhosted.org/packages/dc/61/fe0e56c40d5cd29523e398d31153218718c5786b5e636d9ae8ae79453d27/watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18", size = 277909, upload-time = "2025-10-14T15:05:14.49Z" }, + { url = "https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" }, + { url = "https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" }, + { url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" }, + { url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = "2025-10-14T15:05:23.348Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" }, + { url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" }, + { url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" }, + { url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", 
size = 595615, upload-time = "2025-10-14T15:05:32.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" }, + { url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" }, + { url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/1d/ce/d8acdc8de545de995c339be67711e474c77d643555a9bb74a9334252bd55/watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b", size = 272078, upload-time = "2025-10-14T15:05:37.63Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c9/a74487f72d0451524be827e8edec251da0cc1fcf111646a511ae752e1a3d/watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a", size = 287664, upload-time = "2025-10-14T15:05:38.95Z" }, + { url = "https://files.pythonhosted.org/packages/df/b8/8ac000702cdd496cdce998c6f4ee0ca1f15977bba51bdf07d872ebdfc34c/watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02", size = 277154, upload-time = "2025-10-14T15:05:39.954Z" }, + { url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" }, + { url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" }, + { url = "https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = 
"2025-10-14T15:05:44.404Z" }, + { url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" }, + { url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" }, + { url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" }, + { url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" }, + { url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" }, + { url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" }, + { url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" }, +] + +[[package]] +name = "wavefront" +version = "0.1.0" +source = { virtual = "." 
} + +[package.dev-dependencies] +dev = [ + { name = "pre-commit" }, + { name = "ty" }, +] + +[package.metadata] + +[package.metadata.requires-dev] +dev = [ + { name = "pre-commit", specifier = ">=4.2.0" }, + { name = "ty", specifier = ">=0.0.1a28" }, +] + +[[package]] +name = "websockets" +version = "14.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/54/8359678c726243d19fae38ca14a334e740782336c9f19700858c4eb64a1e/websockets-14.2.tar.gz", hash = "sha256:5059ed9c54945efb321f097084b4c7e52c246f2c869815876a69d1efc4ad6eb5", size = 164394, upload-time = "2025-01-19T21:00:56.431Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b6/504695fb9a33df0ca56d157f5985660b5fc5b4bf8c78f121578d2d653392/websockets-14.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3bdc8c692c866ce5fefcaf07d2b55c91d6922ac397e031ef9b774e5b9ea42166", size = 163088, upload-time = "2025-01-19T20:59:06.435Z" }, + { url = "https://files.pythonhosted.org/packages/81/26/ebfb8f6abe963c795122439c6433c4ae1e061aaedfc7eff32d09394afbae/websockets-14.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c93215fac5dadc63e51bcc6dceca72e72267c11def401d6668622b47675b097f", size = 160745, upload-time = "2025-01-19T20:59:09.109Z" }, + { url = "https://files.pythonhosted.org/packages/a1/c6/1435ad6f6dcbff80bb95e8986704c3174da8866ddb751184046f5c139ef6/websockets-14.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c9b6535c0e2cf8a6bf938064fb754aaceb1e6a4a51a80d884cd5db569886910", size = 160995, upload-time = "2025-01-19T20:59:12.816Z" }, + { url = "https://files.pythonhosted.org/packages/96/63/900c27cfe8be1a1f2433fc77cd46771cf26ba57e6bdc7cf9e63644a61863/websockets-14.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a52a6d7cf6938e04e9dceb949d35fbdf58ac14deea26e685ab6368e73744e4c", size = 170543, upload-time = "2025-01-19T20:59:15.026Z" }, + { url = "https://files.pythonhosted.org/packages/00/8b/bec2bdba92af0762d42d4410593c1d7d28e9bfd952c97a3729df603dc6ea/websockets-14.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f05702e93203a6ff5226e21d9b40c037761b2cfb637187c9802c10f58e40473", size = 169546, upload-time = "2025-01-19T20:59:17.156Z" }, + { url = "https://files.pythonhosted.org/packages/6b/a9/37531cb5b994f12a57dec3da2200ef7aadffef82d888a4c29a0d781568e4/websockets-14.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22441c81a6748a53bfcb98951d58d1af0661ab47a536af08920d129b4d1c3473", size = 169911, upload-time = "2025-01-19T20:59:18.623Z" }, + { url = "https://files.pythonhosted.org/packages/60/d5/a6eadba2ed9f7e65d677fec539ab14a9b83de2b484ab5fe15d3d6d208c28/websockets-14.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd9b868d78b194790e6236d9cbc46d68aba4b75b22497eb4ab64fa640c3af56", size = 170183, upload-time = "2025-01-19T20:59:20.743Z" }, + { url = "https://files.pythonhosted.org/packages/76/57/a338ccb00d1df881c1d1ee1f2a20c9c1b5b29b51e9e0191ee515d254fea6/websockets-14.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a5a20d5843886d34ff8c57424cc65a1deda4375729cbca4cb6b3353f3ce4142", size = 169623, upload-time = "2025-01-19T20:59:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/64/22/e5f7c33db0cb2c1d03b79fd60d189a1da044e2661f5fd01d629451e1db89/websockets-14.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:34277a29f5303d54ec6468fb525d99c99938607bc96b8d72d675dee2b9f5bf1d", size = 169583, upload-time = "2025-01-19T20:59:23.656Z" }, + { url = "https://files.pythonhosted.org/packages/aa/2e/2b4662237060063a22e5fc40d46300a07142afe30302b634b4eebd717c07/websockets-14.2-cp311-cp311-win32.whl", hash = "sha256:02687db35dbc7d25fd541a602b5f8e451a238ffa033030b172ff86a93cb5dc2a", size = 163969, upload-time = "2025-01-19T20:59:26.004Z" }, + { url = "https://files.pythonhosted.org/packages/94/a5/0cda64e1851e73fc1ecdae6f42487babb06e55cb2f0dc8904b81d8ef6857/websockets-14.2-cp311-cp311-win_amd64.whl", hash = "sha256:862e9967b46c07d4dcd2532e9e8e3c2825e004ffbf91a5ef9dde519ee2effb0b", size = 164408, upload-time = "2025-01-19T20:59:28.105Z" }, + { url = "https://files.pythonhosted.org/packages/c1/81/04f7a397653dc8bec94ddc071f34833e8b99b13ef1a3804c149d59f92c18/websockets-14.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f20522e624d7ffbdbe259c6b6a65d73c895045f76a93719aa10cd93b3de100c", size = 163096, upload-time = "2025-01-19T20:59:29.763Z" }, + { url = "https://files.pythonhosted.org/packages/ec/c5/de30e88557e4d70988ed4d2eabd73fd3e1e52456b9f3a4e9564d86353b6d/websockets-14.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:647b573f7d3ada919fd60e64d533409a79dcf1ea21daeb4542d1d996519ca967", size = 160758, upload-time = "2025-01-19T20:59:32.095Z" }, + { url = "https://files.pythonhosted.org/packages/e5/8c/d130d668781f2c77d106c007b6c6c1d9db68239107c41ba109f09e6c218a/websockets-14.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6af99a38e49f66be5a64b1e890208ad026cda49355661549c507152113049990", size = 160995, upload-time = "2025-01-19T20:59:33.527Z" }, + { url = "https://files.pythonhosted.org/packages/a6/bc/f6678a0ff17246df4f06765e22fc9d98d1b11a258cc50c5968b33d6742a1/websockets-14.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:091ab63dfc8cea748cc22c1db2814eadb77ccbf82829bac6b2fbe3401d548eda", size = 170815, upload-time = "2025-01-19T20:59:35.837Z" }, + { url = "https://files.pythonhosted.org/packages/d8/b2/8070cb970c2e4122a6ef38bc5b203415fd46460e025652e1ee3f2f43a9a3/websockets-14.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b374e8953ad477d17e4851cdc66d83fdc2db88d9e73abf755c94510ebddceb95", size = 169759, upload-time = "2025-01-19T20:59:38.216Z" }, + { url = "https://files.pythonhosted.org/packages/81/da/72f7caabd94652e6eb7e92ed2d3da818626e70b4f2b15a854ef60bf501ec/websockets-14.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a39d7eceeea35db85b85e1169011bb4321c32e673920ae9c1b6e0978590012a3", size = 170178, upload-time = "2025-01-19T20:59:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/31/e0/812725b6deca8afd3a08a2e81b3c4c120c17f68c9b84522a520b816cda58/websockets-14.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0a6f3efd47ffd0d12080594f434faf1cd2549b31e54870b8470b28cc1d3817d9", size = 170453, upload-time = "2025-01-19T20:59:41.996Z" }, + { url = "https://files.pythonhosted.org/packages/66/d3/8275dbc231e5ba9bb0c4f93144394b4194402a7a0c8ffaca5307a58ab5e3/websockets-14.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:065ce275e7c4ffb42cb738dd6b20726ac26ac9ad0a2a48e33ca632351a737267", size = 169830, upload-time = "2025-01-19T20:59:44.669Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/ae/e7d1a56755ae15ad5a94e80dd490ad09e345365199600b2629b18ee37bc7/websockets-14.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e9d0e53530ba7b8b5e389c02282f9d2aa47581514bd6049d3a7cffe1385cf5fe", size = 169824, upload-time = "2025-01-19T20:59:46.932Z" }, + { url = "https://files.pythonhosted.org/packages/b6/32/88ccdd63cb261e77b882e706108d072e4f1c839ed723bf91a3e1f216bf60/websockets-14.2-cp312-cp312-win32.whl", hash = "sha256:20e6dd0984d7ca3037afcb4494e48c74ffb51e8013cac71cf607fffe11df7205", size = 163981, upload-time = "2025-01-19T20:59:49.228Z" }, + { url = "https://files.pythonhosted.org/packages/b3/7d/32cdb77990b3bdc34a306e0a0f73a1275221e9a66d869f6ff833c95b56ef/websockets-14.2-cp312-cp312-win_amd64.whl", hash = "sha256:44bba1a956c2c9d268bdcdf234d5e5ff4c9b6dc3e300545cbe99af59dda9dcce", size = 164421, upload-time = "2025-01-19T20:59:50.674Z" }, + { url = "https://files.pythonhosted.org/packages/82/94/4f9b55099a4603ac53c2912e1f043d6c49d23e94dd82a9ce1eb554a90215/websockets-14.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6f1372e511c7409a542291bce92d6c83320e02c9cf392223272287ce55bc224e", size = 163102, upload-time = "2025-01-19T20:59:52.177Z" }, + { url = "https://files.pythonhosted.org/packages/8e/b7/7484905215627909d9a79ae07070057afe477433fdacb59bf608ce86365a/websockets-14.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4da98b72009836179bb596a92297b1a61bb5a830c0e483a7d0766d45070a08ad", size = 160766, upload-time = "2025-01-19T20:59:54.368Z" }, + { url = "https://files.pythonhosted.org/packages/a3/a4/edb62efc84adb61883c7d2c6ad65181cb087c64252138e12d655989eec05/websockets-14.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8a86a269759026d2bde227652b87be79f8a734e582debf64c9d302faa1e9f03", size = 160998, upload-time = "2025-01-19T20:59:56.671Z" }, + { url = "https://files.pythonhosted.org/packages/f5/79/036d320dc894b96af14eac2529967a6fc8b74f03b83c487e7a0e9043d842/websockets-14.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86cf1aaeca909bf6815ea714d5c5736c8d6dd3a13770e885aafe062ecbd04f1f", size = 170780, upload-time = "2025-01-19T20:59:58.085Z" }, + { url = "https://files.pythonhosted.org/packages/63/75/5737d21ee4dd7e4b9d487ee044af24a935e36a9ff1e1419d684feedcba71/websockets-14.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9b0f6c3ba3b1240f602ebb3971d45b02cc12bd1845466dd783496b3b05783a5", size = 169717, upload-time = "2025-01-19T20:59:59.545Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3c/bf9b2c396ed86a0b4a92ff4cdaee09753d3ee389be738e92b9bbd0330b64/websockets-14.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669c3e101c246aa85bc8534e495952e2ca208bd87994650b90a23d745902db9a", size = 170155, upload-time = "2025-01-19T21:00:01.887Z" }, + { url = "https://files.pythonhosted.org/packages/75/2d/83a5aca7247a655b1da5eb0ee73413abd5c3a57fc8b92915805e6033359d/websockets-14.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eabdb28b972f3729348e632ab08f2a7b616c7e53d5414c12108c29972e655b20", size = 170495, upload-time = "2025-01-19T21:00:04.064Z" }, + { url = "https://files.pythonhosted.org/packages/79/dd/699238a92761e2f943885e091486378813ac8f43e3c84990bc394c2be93e/websockets-14.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2066dc4cbcc19f32c12a5a0e8cc1b7ac734e5b64ac0a325ff8353451c4b15ef2", size = 169880, upload-time = "2025-01-19T21:00:05.695Z" }, 
+ { url = "https://files.pythonhosted.org/packages/c8/c9/67a8f08923cf55ce61aadda72089e3ed4353a95a3a4bc8bf42082810e580/websockets-14.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ab95d357cd471df61873dadf66dd05dd4709cae001dd6342edafc8dc6382f307", size = 169856, upload-time = "2025-01-19T21:00:07.192Z" }, + { url = "https://files.pythonhosted.org/packages/17/b1/1ffdb2680c64e9c3921d99db460546194c40d4acbef999a18c37aa4d58a3/websockets-14.2-cp313-cp313-win32.whl", hash = "sha256:a9e72fb63e5f3feacdcf5b4ff53199ec8c18d66e325c34ee4c551ca748623bbc", size = 163974, upload-time = "2025-01-19T21:00:08.698Z" }, + { url = "https://files.pythonhosted.org/packages/14/13/8b7fc4cb551b9cfd9890f0fd66e53c18a06240319915533b033a56a3d520/websockets-14.2-cp313-cp313-win_amd64.whl", hash = "sha256:b439ea828c4ba99bb3176dc8d9b933392a2413c0f6b149fdcba48393f573377f", size = 164420, upload-time = "2025-01-19T21:00:10.182Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c8/d529f8a32ce40d98309f4470780631e971a5a842b60aec864833b3615786/websockets-14.2-py3-none-any.whl", hash = "sha256:7a6ceec4ea84469f15cf15807a747e9efe57e369c384fa86e022b3bea679b79b", size = 157416, upload-time = "2025-01-19T21:00:54.843Z" }, +] + +[[package]] +name = "win32-setctime" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" }, +] + +[[package]] +name = "workflow-job" +version = "0.0.1" +source = { editable = "background_jobs/workflow_job" } +dependencies = [ + { name = "agents-module" }, + { name = "common-module" }, + { name = "flo-cloud" }, + { name = "flo-utils" }, + { name = "python-dotenv" }, +] + +[package.metadata] +requires-dist = [ + { name = "agents-module", editable = "modules/agents_module" }, + { name = "common-module", editable = "modules/common_module" }, + { name = "flo-cloud", editable = "packages/flo_cloud" }, + { name = "flo-utils", editable = "packages/flo_utils" }, + { name = "python-dotenv", specifier = ">=1.1.0,<2.0.0" }, +] + +[[package]] +name = "wrapt" +version = "1.17.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7", size = 53482, upload-time = "2025-08-12T05:51:45.79Z" }, + { url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85", size = 38674, upload-time = 
"2025-08-12T05:51:34.629Z" }, + { url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f", size = 38959, upload-time = "2025-08-12T05:51:56.074Z" }, + { url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311", size = 82376, upload-time = "2025-08-12T05:52:32.134Z" }, + { url = "https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1", size = 83604, upload-time = "2025-08-12T05:52:11.663Z" }, + { url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5", size = 82782, upload-time = "2025-08-12T05:52:12.626Z" }, + { url = "https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2", size = 82076, upload-time = "2025-08-12T05:52:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89", size = 36457, upload-time = "2025-08-12T05:53:03.936Z" }, + { url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77", size = 38745, upload-time = "2025-08-12T05:53:02.885Z" }, + { url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a", size = 36806, upload-time = "2025-08-12T05:52:53.368Z" }, + { url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" }, + { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828", size = 88036, upload-time = "2025-08-12T05:52:34.784Z" }, + { url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9", size = 88156, upload-time = "2025-08-12T05:52:13.599Z" }, + { url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396", size = 87102, upload-time = "2025-08-12T05:52:14.56Z" }, + { url = "https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc", size = 87732, upload-time = "2025-08-12T05:52:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe", size = 36705, upload-time = "2025-08-12T05:53:07.123Z" }, + { url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c", size = 38877, upload-time = "2025-08-12T05:53:05.436Z" }, + { url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6", size = 36885, upload-time = "2025-08-12T05:52:54.367Z" }, + { url = "https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0", size = 54003, upload-time = "2025-08-12T05:51:48.627Z" }, + { url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77", size = 39025, upload-time = "2025-08-12T05:51:37.156Z" }, + { url = "https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7", size = 39108, upload-time = "2025-08-12T05:51:58.425Z" }, + { url = "https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277", size = 88072, upload-time = "2025-08-12T05:52:37.53Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d", size = 88214, upload-time = "2025-08-12T05:52:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa", size = 87105, upload-time = "2025-08-12T05:52:17.914Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050", size = 87766, upload-time = "2025-08-12T05:52:39.243Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8", size = 36711, upload-time = "2025-08-12T05:53:10.074Z" }, + { url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb", size = 38885, upload-time = "2025-08-12T05:53:08.695Z" }, + { url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896, upload-time = "2025-08-12T05:52:55.34Z" }, + { url = "https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39", size = 54132, upload-time = "2025-08-12T05:51:49.864Z" }, + { url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235", size = 39091, upload-time = "2025-08-12T05:51:38.935Z" }, + { url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c", size = 39172, upload-time = "2025-08-12T05:51:59.365Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b", size = 87163, upload-time = "2025-08-12T05:52:40.965Z" }, + { url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa", size = 87963, upload-time = "2025-08-12T05:52:20.326Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7", size = 86945, upload-time = "2025-08-12T05:52:21.581Z" }, + { url = "https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4", size = 86857, upload-time = "2025-08-12T05:52:43.043Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10", size = 37178, upload-time = "2025-08-12T05:53:12.605Z" }, + { url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6", size = 39310, upload-time = "2025-08-12T05:53:11.106Z" }, + { url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58", size = 37266, upload-time = "2025-08-12T05:52:56.531Z" }, + { url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a", size = 56544, upload-time = "2025-08-12T05:51:51.109Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067", size = 40283, upload-time = "2025-08-12T05:51:39.912Z" }, + { url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454", size = 40366, upload-time = "2025-08-12T05:52:00.693Z" }, + { url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e", size = 108571, upload-time = "2025-08-12T05:52:44.521Z" }, + { url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f", size = 113094, upload-time = "2025-08-12T05:52:22.618Z" }, + { url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056", size = 110659, upload-time = "2025-08-12T05:52:24.057Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804", size = 106946, upload-time = "2025-08-12T05:52:45.976Z" }, + { url = "https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl", hash = "sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977", size = 38717, upload-time = "2025-08-12T05:53:15.214Z" }, + { url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116", size = 41334, upload-time = "2025-08-12T05:53:14.178Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6", size = 38471, upload-time = "2025-08-12T05:52:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, +] + +[[package]] +name = "xlrd" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/05/ec9d4fcbbb74bbf4da9f622b3b61aec541e4eccf31d3c60c5422ec027ce2/xlrd-1.2.0.tar.gz", hash = "sha256:546eb36cee8db40c3eaa46c351e67ffee6eeb5fa2650b71bc4c758a29a1b29b2", size = 554079, upload-time = "2018-12-15T17:47:48.133Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/16/63576a1a001752e34bf8ea62e367997530dc553b689356b9879339cf45a4/xlrd-1.2.0-py2.py3-none-any.whl", hash = "sha256:e551fb498759fa3a5384a94ccd4c3c02eb7c00ea424426e212ac0c57be9dfbde", size = 103251, upload-time = "2018-12-15T17:47:45.792Z" }, +] + +[[package]] +name = "xlsxwriter" +version = "3.2.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/46/2c/c06ef49dc36e7954e55b802a8b231770d286a9758b3d936bd1e04ce5ba88/xlsxwriter-3.2.9.tar.gz", hash = "sha256:254b1c37a368c444eac6e2f867405cc9e461b0ed97a3233b2ac1e574efb4140c", size = 215940, upload-time = "2025-09-16T00:16:21.63Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/0c/3662f4a66880196a590b202f0db82d919dd2f89e99a27fadef91c4a33d41/xlsxwriter-3.2.9-py3-none-any.whl", hash = "sha256:9a5db42bc5dff014806c58a20b9eae7322a134abb6fce3c92c181bfb275ec5b3", size = 175315, upload-time = "2025-09-16T00:16:20.108Z" }, +] + +[[package]] +name = "yarl" +version = "1.20.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428, upload-time = "2025-06-10T00:46:09.923Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/18/893b50efc2350e47a874c5c2d67e55a0ea5df91186b2a6f5ac52eff887cd/yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", 
hash = "sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e", size = 133833, upload-time = "2025-06-10T00:43:07.393Z" }, + { url = "https://files.pythonhosted.org/packages/89/ed/b8773448030e6fc47fa797f099ab9eab151a43a25717f9ac043844ad5ea3/yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b", size = 91070, upload-time = "2025-06-10T00:43:09.538Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e3/409bd17b1e42619bf69f60e4f031ce1ccb29bd7380117a55529e76933464/yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b", size = 89818, upload-time = "2025-06-10T00:43:11.575Z" }, + { url = "https://files.pythonhosted.org/packages/f8/77/64d8431a4d77c856eb2d82aa3de2ad6741365245a29b3a9543cd598ed8c5/yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4", size = 347003, upload-time = "2025-06-10T00:43:14.088Z" }, + { url = "https://files.pythonhosted.org/packages/8d/d2/0c7e4def093dcef0bd9fa22d4d24b023788b0a33b8d0088b51aa51e21e99/yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1", size = 336537, upload-time = "2025-06-10T00:43:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/f0/f3/fc514f4b2cf02cb59d10cbfe228691d25929ce8f72a38db07d3febc3f706/yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833", size = 362358, upload-time = "2025-06-10T00:43:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6d/a313ac8d8391381ff9006ac05f1d4331cee3b1efaa833a53d12253733255/yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d", size = 357362, upload-time = "2025-06-10T00:43:20.888Z" }, + { url = "https://files.pythonhosted.org/packages/00/70/8f78a95d6935a70263d46caa3dd18e1f223cf2f2ff2037baa01a22bc5b22/yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8", size = 348979, upload-time = "2025-06-10T00:43:23.169Z" }, + { url = "https://files.pythonhosted.org/packages/cb/05/42773027968968f4f15143553970ee36ead27038d627f457cc44bbbeecf3/yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf", size = 337274, upload-time = "2025-06-10T00:43:27.111Z" }, + { url = "https://files.pythonhosted.org/packages/05/be/665634aa196954156741ea591d2f946f1b78ceee8bb8f28488bf28c0dd62/yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e", size = 363294, upload-time = "2025-06-10T00:43:28.96Z" }, + { url = "https://files.pythonhosted.org/packages/eb/90/73448401d36fa4e210ece5579895731f190d5119c4b66b43b52182e88cd5/yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389", size = 358169, upload-time = "2025-06-10T00:43:30.701Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/b0/fce922d46dc1eb43c811f1889f7daa6001b27a4005587e94878570300881/yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f", size = 362776, upload-time = "2025-06-10T00:43:32.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/0d/b172628fce039dae8977fd22caeff3eeebffd52e86060413f5673767c427/yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845", size = 381341, upload-time = "2025-06-10T00:43:34.543Z" }, + { url = "https://files.pythonhosted.org/packages/6b/9b/5b886d7671f4580209e855974fe1cecec409aa4a89ea58b8f0560dc529b1/yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1", size = 379988, upload-time = "2025-06-10T00:43:36.489Z" }, + { url = "https://files.pythonhosted.org/packages/73/be/75ef5fd0fcd8f083a5d13f78fd3f009528132a1f2a1d7c925c39fa20aa79/yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e", size = 371113, upload-time = "2025-06-10T00:43:38.592Z" }, + { url = "https://files.pythonhosted.org/packages/50/4f/62faab3b479dfdcb741fe9e3f0323e2a7d5cd1ab2edc73221d57ad4834b2/yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773", size = 81485, upload-time = "2025-06-10T00:43:41.038Z" }, + { url = "https://files.pythonhosted.org/packages/f0/09/d9c7942f8f05c32ec72cd5c8e041c8b29b5807328b68b4801ff2511d4d5e/yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e", size = 86686, upload-time = "2025-06-10T00:43:42.692Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667, upload-time = "2025-06-10T00:43:44.369Z" }, + { url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025, upload-time = "2025-06-10T00:43:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709, upload-time = "2025-06-10T00:43:48.22Z" }, + { url = "https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287, upload-time = "2025-06-10T00:43:49.924Z" }, + { url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429, upload-time = "2025-06-10T00:43:51.7Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429, upload-time = "2025-06-10T00:43:53.494Z" }, + { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862, upload-time = "2025-06-10T00:43:55.766Z" }, + { url = "https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616, upload-time = "2025-06-10T00:43:58.056Z" }, + { url = "https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954, upload-time = "2025-06-10T00:43:59.773Z" }, + { url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575, upload-time = "2025-06-10T00:44:02.051Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061, upload-time = "2025-06-10T00:44:04.196Z" }, + { url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142, upload-time = "2025-06-10T00:44:06.527Z" }, + { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894, upload-time = "2025-06-10T00:44:08.379Z" }, + { url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378, upload-time = "2025-06-10T00:44:10.51Z" }, + { url = "https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069, upload-time = "2025-06-10T00:44:12.834Z" }, + { url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249, upload-time = "2025-06-10T00:44:14.731Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811, upload-time = "2025-06-10T00:44:18.933Z" }, + { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078, upload-time = "2025-06-10T00:44:20.635Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748, upload-time = "2025-06-10T00:44:22.34Z" }, + { url = "https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595, upload-time = "2025-06-10T00:44:24.314Z" }, + { url = "https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616, upload-time = "2025-06-10T00:44:26.167Z" }, + { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324, upload-time = "2025-06-10T00:44:27.915Z" }, + { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676, upload-time = "2025-06-10T00:44:30.041Z" }, + { url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614, upload-time = "2025-06-10T00:44:32.171Z" }, + { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766, upload-time = "2025-06-10T00:44:34.494Z" }, + { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615, upload-time = 
"2025-06-10T00:44:36.856Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982, upload-time = "2025-06-10T00:44:39.141Z" }, + { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792, upload-time = "2025-06-10T00:44:40.934Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049, upload-time = "2025-06-10T00:44:42.854Z" }, + { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774, upload-time = "2025-06-10T00:44:45.275Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252, upload-time = "2025-06-10T00:44:47.31Z" }, + { url = "https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198, upload-time = "2025-06-10T00:44:49.164Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346, upload-time = "2025-06-10T00:44:51.182Z" }, + { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826, upload-time = "2025-06-10T00:44:52.883Z" }, + { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217, upload-time = "2025-06-10T00:44:54.658Z" }, + { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700, upload-time = "2025-06-10T00:44:56.784Z" }, + { url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644, upload-time = "2025-06-10T00:44:59.071Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452, upload-time = "2025-06-10T00:45:01.605Z" }, + { url = "https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378, upload-time = "2025-06-10T00:45:03.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261, upload-time = "2025-06-10T00:45:05.992Z" }, + { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987, upload-time = "2025-06-10T00:45:08.227Z" }, + { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361, upload-time = "2025-06-10T00:45:10.11Z" }, + { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460, upload-time = "2025-06-10T00:45:12.055Z" }, + { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486, upload-time = "2025-06-10T00:45:13.995Z" }, + { url = "https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219, upload-time = "2025-06-10T00:45:16.479Z" }, + { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693, upload-time = "2025-06-10T00:45:18.399Z" }, + { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803, upload-time = "2025-06-10T00:45:20.677Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709, upload-time = 
"2025-06-10T00:45:23.221Z" }, + { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591, upload-time = "2025-06-10T00:45:25.793Z" }, + { url = "https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] From 17143dd673b4daf125e056700cf03db41e6c08a5 Mon Sep 17 00:00:00 2001 From: vizsatiz Date: Fri, 5 Dec 2025 11:37:31 +0530 Subject: [PATCH 02/13] Create workspace file --- wavefront.code-workspace | 18 ++++++++++++++++++ wavefront/server/pyproject.toml | 3 +++ 2 files changed, 21 insertions(+) create mode 100644 wavefront.code-workspace diff --git a/wavefront.code-workspace b/wavefront.code-workspace new file mode 100644 index 00000000..940cc0b5 --- /dev/null +++ b/wavefront.code-workspace @@ -0,0 +1,18 @@ +{ + "folders": [ + { + "name": "flo-ai", + "path": "flo_ai" + }, + { + "name": "wavefront", + "path": "wavefront" + } + ], + "settings": { + "python.analysis.extraPaths": [ + "${workspaceFolder:wavefront}", + "${workspaceFolder:flo-ai}" + ] + } +} \ No newline at end of file diff --git a/wavefront/server/pyproject.toml b/wavefront/server/pyproject.toml index b5df56d4..42dfb802 100644 --- a/wavefront/server/pyproject.toml +++ b/wavefront/server/pyproject.toml @@ -13,6 +13,9 @@ dev = [ "ty>=0.0.1a28", ] +[tool.ty.environment] +root = ["../server"] + [tool.uv.workspace] members = [ "apps/*", From d8d3ab21be8ecabeb300ba25ff10540f55d7f291 Mon Sep 17 00:00:00 2001 From: vizsatiz Date: Fri, 5 Dec 2025 12:04:44 +0530 Subject: [PATCH 03/13] Setting up floware docker --- .gitignore | 4 +--- docker/floware.Dockerfile | 45 +++++++++++++++++++++++++++++++++++++++ wavefront.code-workspace | 18 ---------------- 3 files changed, 46 insertions(+), 21 deletions(-) create mode 100644 docker/floware.Dockerfile delete mode 100644 wavefront.code-workspace diff --git a/.gitignore b/.gitignore index 2e0165e9..41a82198 100644 --- a/.gitignore +++ b/.gitignore @@ -15,10 +15,8 @@ bin *.yaml examples/local/* .logs -scratch_pad.py .* *.png *.html -usecases/ -compare_gemini_outputs_v1.py node_modules/ +flo_ai/usecases diff --git a/docker/floware.Dockerfile b/docker/floware.Dockerfile new file mode 100644 index 00000000..016e8be8 --- /dev/null 
+++ b/docker/floware.Dockerfile
@@ -0,0 +1,45 @@
+FROM python:3.11-slim
+
+WORKDIR /app
+
+COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
+
+RUN apt-get update && apt-get install -y \
+    libpq-dev \
+    gcc \
+    libgl1 \
+    libglib2.0-0 \
+    && rm -rf /var/lib/apt/lists/*
+
+COPY pyproject.toml uv.lock ./
+
+COPY modules/auth_module /app/modules/auth_module
+COPY modules/common_module /app/modules/common_module
+COPY modules/db_repo_module /app/modules/db_repo_module
+COPY modules/gold_module /app/modules/gold_module
+COPY modules/insights_module /app/modules/insights_module
+COPY modules/knowledge_base_module /app/modules/knowledge_base_module
+COPY modules/user_management_module /app/modules/user_management_module
+COPY modules/llm_inference_config_module /app/modules/llm_inference_config_module
+COPY modules/agents_module /app/modules/agents_module
+COPY modules/plugins_module/ /app/modules/plugins_module
+COPY modules/product_analysis_module /app/modules/product_analysis_module
+COPY modules/inference_module /app/modules/inference_module
+COPY modules/image_search_module /app/modules/image_search_module
+COPY modules/tools_module /app/modules/tools_module
+COPY modules/voice_agents_module /app/modules/voice_agents_module
+COPY modules/api_services_module /app/modules/api_services_module
+
+COPY packages/flo_cloud /app/packages/flo_cloud
+COPY packages/flo_utils /app/packages/flo_utils
+
+COPY plugins/datasource /app/plugins/datasource
+COPY plugins/authenticator /app/plugins/authenticator
+
+COPY apps/floware /app/apps/floware
+
+RUN uv sync --package floware --frozen --no-dev
+
+WORKDIR /app/apps/floware/floware
+
+CMD ["uv", "run", "server.py"]
diff --git a/wavefront.code-workspace b/wavefront.code-workspace
deleted file mode 100644
index 940cc0b5..00000000
--- a/wavefront.code-workspace
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-    "folders": [
-        {
-            "name": "flo-ai",
-            "path": "flo_ai"
-        },
-        {
-            "name": "wavefront",
-            "path": "wavefront"
-        }
-    ],
-    "settings": {
-        "python.analysis.extraPaths": [
-            "${workspaceFolder:wavefront}",
-            "${workspaceFolder:flo-ai}"
-        ]
-    }
-}
\ No newline at end of file

From 3148eb9658858c262276d5feddfa97f83a9c3f5f Mon Sep 17 00:00:00 2001
From: vishnu r kumar
Date: Tue, 9 Dec 2025 16:31:12 +0530
Subject: [PATCH 04/13] feat: bring console web to wavefront repo

---
 wavefront/client/README.md | 54 + wavefront/client/components.json | 22 + wavefront/client/eslint.config.js | 28 + wavefront/client/package.json | 67 + wavefront/client/postcss.config.js | 5 + wavefront/client/public/background.webp | Bin 0 -> 62706 bytes wavefront/client/public/config.js | 4 + .../client/public/font/SFPRODISPLAYMEDIUM.OTF | Bin 0 -> 335512 bytes wavefront/client/public/logo.svg | 9 + wavefront/client/server.cjs | 82 + wavefront/client/src/App.tsx | 22 + wavefront/client/src/api/agent-service.ts | 100 ++ .../client/src/api/api-service-service.ts | 62 + wavefront/client/src/api/app-service.ts | 61 + .../client/src/api/authenticator-service.ts | 80 + .../client/src/api/console-auth-service.ts | 24 + .../client/src/api/data-pipeline-service.ts | 186 +++ .../client/src/api/datasources-service.ts | 147 ++ wavefront/client/src/api/index.ts | 109 ++ .../client/src/api/knowledge-base-service.ts | 252 +++ .../client/src/api/llm-inference-service.ts | 56 + .../src/api/message-processor-service.ts | 74 + .../client/src/api/model-inference-service.ts | 143 ++ wavefront/client/src/api/namespace-service.ts | 29 + .../client/src/api/stt-config-service.ts | 58 + .../src/api/telephony-config-service.ts
| 62 + wavefront/client/src/api/tool-service.ts | 33 + .../client/src/api/tts-config-service.ts | 58 + wavefront/client/src/api/user-service.ts | 26 + .../client/src/api/voice-agent-service.ts | 69 + wavefront/client/src/api/workflow-service.ts | 172 +++ .../client/src/assets/icons/ai-agent-icon.tsx | 12 + .../client/src/assets/icons/api-icon.tsx | 25 + .../client/src/assets/icons/datasources.tsx | 14 + wavefront/client/src/assets/icons/index.ts | 10 + .../src/assets/icons/model-inference-icon.tsx | 20 + .../assets/icons/model-repository-icon.tsx | 13 + .../src/assets/icons/permission-icon.tsx | 22 + .../client/src/assets/icons/phone-icon.tsx | 21 + .../client/src/assets/icons/rag-icon.tsx | 12 + .../client/src/assets/icons/rootflo-icon.tsx | 46 + .../client/src/assets/icons/workflow-icon.tsx | 18 + wavefront/client/src/components/AgentCard.tsx | 36 + .../client/src/components/ApiServiceCard.tsx | 40 + wavefront/client/src/components/AppCard.tsx | 63 + .../src/components/AuthenticatorCard.tsx | 42 + wavefront/client/src/components/ChatBot.tsx | 459 ++++++ .../client/src/components/DashboardLayout.tsx | 143 ++ .../client/src/components/DatasourceCard.tsx | 55 + .../components/DeleteConfirmationDialog.tsx | 83 + wavefront/client/src/components/EmptyCard.tsx | 25 + .../client/src/components/FunctionCard.tsx | 32 + .../client/src/components/InferencePopup.tsx | 513 ++++++ .../src/components/KnowledgeBaseCard.tsx | 32 + .../client/src/components/LLMConfigCard.tsx | 66 + wavefront/client/src/components/ModelCard.tsx | 37 + .../client/src/components/ProtectedLayout.tsx | 32 + .../client/src/components/ResourceCard.tsx | 102 ++ wavefront/client/src/components/Stream.tsx | 97 ++ .../client/src/components/WorkflowCard.tsx | 36 + .../src/components/WorkflowPipelineCard.tsx | 56 + .../client/src/components/topbar/Topbar.tsx | 150 ++ .../client/src/components/ui/alert-dialog.tsx | 104 ++ wavefront/client/src/components/ui/alert.tsx | 44 + .../client/src/components/ui/breadcrumb.tsx | 90 ++ wavefront/client/src/components/ui/button.tsx | 67 + .../client/src/components/ui/checkbox.tsx | 26 + .../client/src/components/ui/command.tsx | 132 ++ wavefront/client/src/components/ui/dialog.tsx | 121 ++ .../src/components/ui/dropdown-menu.tsx | 180 +++ wavefront/client/src/components/ui/empty.tsx | 85 + wavefront/client/src/components/ui/field.tsx | 222 +++ wavefront/client/src/components/ui/form.tsx | 149 ++ wavefront/client/src/components/ui/input.tsx | 22 + wavefront/client/src/components/ui/label.tsx | 19 + .../client/src/components/ui/popover.tsx | 31 + wavefront/client/src/components/ui/select.tsx | 143 ++ .../client/src/components/ui/separator.tsx | 28 + .../client/src/components/ui/skeleton.tsx | 7 + wavefront/client/src/components/ui/slider.tsx | 23 + .../client/src/components/ui/spinner.tsx | 9 + wavefront/client/src/components/ui/switch.tsx | 27 + wavefront/client/src/components/ui/table.tsx | 76 + wavefront/client/src/components/ui/tabs.tsx | 53 + .../client/src/components/ui/textarea.tsx | 21 + .../client/src/components/ui/tooltip.tsx | 30 + wavefront/client/src/config/authenticators.ts | 388 +++++ wavefront/client/src/config/env.ts | 14 + wavefront/client/src/config/llm-providers.ts | 538 +++++++ .../client/src/config/telephony-providers.ts | 193 +++ .../client/src/config/voice-providers.ts | 292 ++++ .../client/src/hooks/data/fetch-hooks.ts | 527 +++++++ wavefront/client/src/hooks/data/index.ts | 4 + .../src/hooks/data/mutation-functions.ts | 35 + .../client/src/hooks/data/mutation-hooks.ts | 83 + 
.../client/src/hooks/data/query-functions.ts | 545 +++++++ wavefront/client/src/hooks/data/query-keys.ts | 191 +++ wavefront/client/src/hooks/index.ts | 1 + wavefront/client/src/index.css | 169 ++ wavefront/client/src/lib/axios.ts | 71 + wavefront/client/src/lib/constants.ts | 2 + wavefront/client/src/lib/react-query.ts | 21 + wavefront/client/src/lib/utils.ts | 94 ++ wavefront/client/src/main.tsx | 15 + wavefront/client/src/not-found/index.tsx | 48 + .../apps/[appId]/agents/CreateAgentDialog.tsx | 219 +++ .../apps/[appId]/agents/EditAgentDialog.tsx | 266 ++++ .../src/pages/apps/[appId]/agents/[id].tsx | 677 ++++++++ .../src/pages/apps/[appId]/agents/index.tsx | 207 +++ .../src/pages/apps/[appId]/agents/schemas.ts | 9 + .../api-services/CreateApiServiceDialog.tsx | 166 ++ .../pages/apps/[appId]/api-services/[id].tsx | 1375 +++++++++++++++++ .../pages/apps/[appId]/api-services/index.tsx | 199 +++ .../CreateAuthenticatorDialog.tsx | 470 ++++++ .../apps/[appId]/authenticators/[authId].tsx | 635 ++++++++ .../apps/[appId]/authenticators/index.tsx | 188 +++ .../datasources/CreateDatasourceDialog.tsx | 248 +++ .../datasources/EditDatasourceDialog.tsx | 229 +++ .../apps/[appId]/datasources/YamlCreation.tsx | 103 ++ .../apps/[appId]/datasources/YamlView.tsx | 366 +++++ .../pages/apps/[appId]/datasources/Yamls.tsx | 89 ++ .../[appId]/datasources/[datasourceId].tsx | 420 +++++ .../pages/apps/[appId]/datasources/index.tsx | 193 +++ .../pages/apps/[appId]/datasources/schemas.ts | 25 + .../functions/CreateFunctionDialog.tsx | 206 +++ .../apps/[appId]/functions/[functionId].tsx | 335 ++++ .../pages/apps/[appId]/functions/index.tsx | 202 +++ .../CreateKnowledgeBaseDialog.tsx | 220 +++ .../[appId]/knowledge-bases/[kbId]/index.tsx | 651 ++++++++ .../apps/[appId]/knowledge-bases/index.tsx | 206 +++ .../CreateLLMInferenceDialog.tsx | 468 ++++++ .../apps/[appId]/llm-inference/[configId].tsx | 501 ++++++ .../apps/[appId]/llm-inference/index.tsx | 211 +++ .../CreateModelInferenceDialog.tsx | 177 +++ .../[appId]/model-inference/[modelId].tsx | 422 +++++ .../apps/[appId]/model-inference/index.tsx | 212 +++ .../apps/[appId]/pipelines/[pipelineId].tsx | 686 ++++++++ .../pages/apps/[appId]/pipelines/create.tsx | 173 +++ .../pages/apps/[appId]/pipelines/index.tsx | 332 ++++ .../voice-agents/CreateVoiceAgentDialog.tsx | 471 ++++++ .../voice-agents/EditVoiceAgentDialog.tsx | 469 ++++++ .../voice-agents/OutboundCallDialog.tsx | 170 ++ .../pages/apps/[appId]/voice-agents/index.tsx | 226 +++ .../apps/[appId]/voice-agents/layout.tsx | 107 ++ .../stt-configs/CreateSttConfigDialog.tsx | 409 +++++ .../stt-configs/EditSttConfigDialog.tsx | 416 +++++ .../voice-agents/stt-configs/index.tsx | 194 +++ .../CreateTelephonyConfigDialog.tsx | 473 ++++++ .../EditTelephonyConfigDialog.tsx | 482 ++++++ .../voice-agents/telephony-configs/index.tsx | 199 +++ .../tts-configs/CreateTtsConfigDialog.tsx | 469 ++++++ .../tts-configs/EditTtsConfigDialog.tsx | 437 ++++++ .../voice-agents/tts-configs/index.tsx | 197 +++ .../workflows/CreateWorkflowDialog.tsx | 252 +++ .../src/pages/apps/[appId]/workflows/[id].tsx | 821 ++++++++++ .../pages/apps/[appId]/workflows/index.tsx | 179 +++ .../pages/apps/[appId]/workflows/layout.tsx | 97 ++ .../CreateWorkflowPipelineDialog.tsx | 156 ++ .../pipelines/[workflowPipelineId].tsx | 575 +++++++ .../[appId]/workflows/pipelines/index.tsx | 188 +++ wavefront/client/src/pages/apps/create.tsx | 259 ++++ .../client/src/pages/apps/edit/[appId].tsx | 233 +++ wavefront/client/src/pages/apps/index.tsx | 111 ++ 
wavefront/client/src/pages/apps/layout.tsx | 148 ++ wavefront/client/src/pages/apps/schemas.ts | 62 + .../src/pages/forgot-password/index.tsx | 90 ++ wavefront/client/src/pages/login/index.tsx | 162 ++ wavefront/client/src/pages/logout/index.tsx | 42 + .../client/src/pages/reset-password/index.tsx | 131 ++ wavefront/client/src/pages/types.ts | 170 ++ wavefront/client/src/router/index.tsx | 99 ++ wavefront/client/src/router/routes.tsx | 207 +++ wavefront/client/src/store/auth-store.ts | 22 + wavefront/client/src/store/dashboard-store.ts | 23 + wavefront/client/src/store/index.ts | 3 + .../client/src/store/notification-store.ts | 35 + wavefront/client/src/types.ts | 5 + wavefront/client/src/types/agent.ts | 189 +++ wavefront/client/src/types/api-service.ts | 58 + wavefront/client/src/types/app.ts | 49 + wavefront/client/src/types/authenticator.ts | 98 ++ wavefront/client/src/types/datasource.ts | 94 ++ .../client/src/types/llm-inference-config.ts | 47 + .../client/src/types/message-processor.ts | 56 + wavefront/client/src/types/pipeline.ts | 154 ++ wavefront/client/src/types/stt-config.ts | 61 + .../client/src/types/telephony-config.ts | 84 + wavefront/client/src/types/tool.ts | 58 + wavefront/client/src/types/tts-config.ts | 75 + wavefront/client/src/types/user.ts | 6 + wavefront/client/src/types/voice-agent.ts | 77 + wavefront/client/src/types/workflow.ts | 179 +++ wavefront/client/src/utils/form-validation.ts | 17 + wavefront/client/src/utils/regex.ts | 4 + wavefront/client/src/utils/scroll.ts | 13 + .../client/src/utils/string-formatting.ts | 18 + wavefront/client/src/vite-env.d.ts | 1 + wavefront/client/tailwind.config.js | 49 + wavefront/client/tsconfig.app.json | 31 + wavefront/client/tsconfig.json | 10 + wavefront/client/tsconfig.node.json | 25 + wavefront/client/vite.config.ts | 17 + 202 files changed, 30691 insertions(+) create mode 100644 wavefront/client/README.md create mode 100644 wavefront/client/components.json create mode 100644 wavefront/client/eslint.config.js create mode 100644 wavefront/client/package.json create mode 100644 wavefront/client/postcss.config.js create mode 100644 wavefront/client/public/background.webp create mode 100644 wavefront/client/public/config.js create mode 100644 wavefront/client/public/font/SFPRODISPLAYMEDIUM.OTF create mode 100644 wavefront/client/public/logo.svg create mode 100644 wavefront/client/server.cjs create mode 100644 wavefront/client/src/App.tsx create mode 100644 wavefront/client/src/api/agent-service.ts create mode 100644 wavefront/client/src/api/api-service-service.ts create mode 100644 wavefront/client/src/api/app-service.ts create mode 100644 wavefront/client/src/api/authenticator-service.ts create mode 100644 wavefront/client/src/api/console-auth-service.ts create mode 100644 wavefront/client/src/api/data-pipeline-service.ts create mode 100644 wavefront/client/src/api/datasources-service.ts create mode 100644 wavefront/client/src/api/index.ts create mode 100644 wavefront/client/src/api/knowledge-base-service.ts create mode 100644 wavefront/client/src/api/llm-inference-service.ts create mode 100644 wavefront/client/src/api/message-processor-service.ts create mode 100644 wavefront/client/src/api/model-inference-service.ts create mode 100644 wavefront/client/src/api/namespace-service.ts create mode 100644 wavefront/client/src/api/stt-config-service.ts create mode 100644 wavefront/client/src/api/telephony-config-service.ts create mode 100644 wavefront/client/src/api/tool-service.ts create mode 100644 
wavefront/client/src/api/tts-config-service.ts
 create mode 100644 wavefront/client/src/api/user-service.ts
 create mode 100644 wavefront/client/src/api/voice-agent-service.ts
 create mode 100644 wavefront/client/src/api/workflow-service.ts
 create mode 100644 wavefront/client/src/assets/icons/ai-agent-icon.tsx
 create mode 100644 wavefront/client/src/assets/icons/api-icon.tsx
 create mode 100644 wavefront/client/src/assets/icons/datasources.tsx
 create mode 100644 wavefront/client/src/assets/icons/index.ts
 create mode 100644 wavefront/client/src/assets/icons/model-inference-icon.tsx
 create mode 100644 wavefront/client/src/assets/icons/model-repository-icon.tsx
 create mode 100644 wavefront/client/src/assets/icons/permission-icon.tsx
 create mode 100644 wavefront/client/src/assets/icons/phone-icon.tsx
 create mode 100644 wavefront/client/src/assets/icons/rag-icon.tsx
 create mode 100644 wavefront/client/src/assets/icons/rootflo-icon.tsx
 create mode 100644 wavefront/client/src/assets/icons/workflow-icon.tsx
 create mode 100644 wavefront/client/src/components/AgentCard.tsx
 create mode 100644 wavefront/client/src/components/ApiServiceCard.tsx
 create mode 100644 wavefront/client/src/components/AppCard.tsx
 create mode 100644 wavefront/client/src/components/AuthenticatorCard.tsx
 create mode 100644 wavefront/client/src/components/ChatBot.tsx
 create mode 100644 wavefront/client/src/components/DashboardLayout.tsx
 create mode 100644 wavefront/client/src/components/DatasourceCard.tsx
 create mode 100644 wavefront/client/src/components/DeleteConfirmationDialog.tsx
 create mode 100644 wavefront/client/src/components/EmptyCard.tsx
 create mode 100644 wavefront/client/src/components/FunctionCard.tsx
 create mode 100644 wavefront/client/src/components/InferencePopup.tsx
 create mode 100644 wavefront/client/src/components/KnowledgeBaseCard.tsx
 create mode 100644 wavefront/client/src/components/LLMConfigCard.tsx
 create mode 100644 wavefront/client/src/components/ModelCard.tsx
 create mode 100644 wavefront/client/src/components/ProtectedLayout.tsx
 create mode 100644 wavefront/client/src/components/ResourceCard.tsx
 create mode 100644 wavefront/client/src/components/Stream.tsx
 create mode 100644 wavefront/client/src/components/WorkflowCard.tsx
 create mode 100644 wavefront/client/src/components/WorkflowPipelineCard.tsx
 create mode 100644 wavefront/client/src/components/topbar/Topbar.tsx
 create mode 100644 wavefront/client/src/components/ui/alert-dialog.tsx
 create mode 100644 wavefront/client/src/components/ui/alert.tsx
 create mode 100644 wavefront/client/src/components/ui/breadcrumb.tsx
 create mode 100644 wavefront/client/src/components/ui/button.tsx
 create mode 100644 wavefront/client/src/components/ui/checkbox.tsx
 create mode 100644 wavefront/client/src/components/ui/command.tsx
 create mode 100644 wavefront/client/src/components/ui/dialog.tsx
 create mode 100644 wavefront/client/src/components/ui/dropdown-menu.tsx
 create mode 100644 wavefront/client/src/components/ui/empty.tsx
 create mode 100644 wavefront/client/src/components/ui/field.tsx
 create mode 100644 wavefront/client/src/components/ui/form.tsx
 create mode 100644 wavefront/client/src/components/ui/input.tsx
 create mode 100644 wavefront/client/src/components/ui/label.tsx
 create mode 100644 wavefront/client/src/components/ui/popover.tsx
 create mode 100644 wavefront/client/src/components/ui/select.tsx
 create mode 100644 wavefront/client/src/components/ui/separator.tsx
 create mode 100644 wavefront/client/src/components/ui/skeleton.tsx
 create mode 100644 wavefront/client/src/components/ui/slider.tsx
 create mode 100644 wavefront/client/src/components/ui/spinner.tsx
 create mode 100644 wavefront/client/src/components/ui/switch.tsx
 create mode 100644 wavefront/client/src/components/ui/table.tsx
 create mode 100644 wavefront/client/src/components/ui/tabs.tsx
 create mode 100644 wavefront/client/src/components/ui/textarea.tsx
 create mode 100644 wavefront/client/src/components/ui/tooltip.tsx
 create mode 100644 wavefront/client/src/config/authenticators.ts
 create mode 100644 wavefront/client/src/config/env.ts
 create mode 100644 wavefront/client/src/config/llm-providers.ts
 create mode 100644 wavefront/client/src/config/telephony-providers.ts
 create mode 100644 wavefront/client/src/config/voice-providers.ts
 create mode 100644 wavefront/client/src/hooks/data/fetch-hooks.ts
 create mode 100644 wavefront/client/src/hooks/data/index.ts
 create mode 100644 wavefront/client/src/hooks/data/mutation-functions.ts
 create mode 100644 wavefront/client/src/hooks/data/mutation-hooks.ts
 create mode 100644 wavefront/client/src/hooks/data/query-functions.ts
 create mode 100644 wavefront/client/src/hooks/data/query-keys.ts
 create mode 100644 wavefront/client/src/hooks/index.ts
 create mode 100644 wavefront/client/src/index.css
 create mode 100644 wavefront/client/src/lib/axios.ts
 create mode 100644 wavefront/client/src/lib/constants.ts
 create mode 100644 wavefront/client/src/lib/react-query.ts
 create mode 100644 wavefront/client/src/lib/utils.ts
 create mode 100644 wavefront/client/src/main.tsx
 create mode 100644 wavefront/client/src/not-found/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/agents/CreateAgentDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/agents/EditAgentDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/agents/[id].tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/agents/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/agents/schemas.ts
 create mode 100644 wavefront/client/src/pages/apps/[appId]/api-services/CreateApiServiceDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/api-services/[id].tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/api-services/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/authenticators/CreateAuthenticatorDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/authenticators/[authId].tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/authenticators/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/datasources/CreateDatasourceDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/datasources/EditDatasourceDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/datasources/YamlCreation.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/datasources/YamlView.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/datasources/Yamls.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/datasources/[datasourceId].tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/datasources/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/datasources/schemas.ts
 create mode 100644 wavefront/client/src/pages/apps/[appId]/functions/CreateFunctionDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/functions/[functionId].tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/functions/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/knowledge-bases/CreateKnowledgeBaseDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/knowledge-bases/[kbId]/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/knowledge-bases/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/llm-inference/CreateLLMInferenceDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/llm-inference/[configId].tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/llm-inference/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/model-inference/CreateModelInferenceDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/model-inference/[modelId].tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/model-inference/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/pipelines/[pipelineId].tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/pipelines/create.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/pipelines/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/CreateVoiceAgentDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/EditVoiceAgentDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/OutboundCallDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/layout.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/stt-configs/CreateSttConfigDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/stt-configs/EditSttConfigDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/stt-configs/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/telephony-configs/CreateTelephonyConfigDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/telephony-configs/EditTelephonyConfigDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/telephony-configs/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/tts-configs/CreateTtsConfigDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/tts-configs/EditTtsConfigDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/voice-agents/tts-configs/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/workflows/CreateWorkflowDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/workflows/[id].tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/workflows/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/workflows/layout.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/workflows/pipelines/CreateWorkflowPipelineDialog.tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/workflows/pipelines/[workflowPipelineId].tsx
 create mode 100644 wavefront/client/src/pages/apps/[appId]/workflows/pipelines/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/create.tsx
 create mode 100644 wavefront/client/src/pages/apps/edit/[appId].tsx
 create mode 100644 wavefront/client/src/pages/apps/index.tsx
 create mode 100644 wavefront/client/src/pages/apps/layout.tsx
 create mode 100644 wavefront/client/src/pages/apps/schemas.ts
 create mode 100644 wavefront/client/src/pages/forgot-password/index.tsx
 create mode 100644 wavefront/client/src/pages/login/index.tsx
 create mode 100644 wavefront/client/src/pages/logout/index.tsx
 create mode 100644 wavefront/client/src/pages/reset-password/index.tsx
 create mode 100644 wavefront/client/src/pages/types.ts
 create mode 100644 wavefront/client/src/router/index.tsx
 create mode 100644 wavefront/client/src/router/routes.tsx
 create mode 100644 wavefront/client/src/store/auth-store.ts
 create mode 100644 wavefront/client/src/store/dashboard-store.ts
 create mode 100644 wavefront/client/src/store/index.ts
 create mode 100644 wavefront/client/src/store/notification-store.ts
 create mode 100644 wavefront/client/src/types.ts
 create mode 100644 wavefront/client/src/types/agent.ts
 create mode 100644 wavefront/client/src/types/api-service.ts
 create mode 100644 wavefront/client/src/types/app.ts
 create mode 100644 wavefront/client/src/types/authenticator.ts
 create mode 100644 wavefront/client/src/types/datasource.ts
 create mode 100644 wavefront/client/src/types/llm-inference-config.ts
 create mode 100644 wavefront/client/src/types/message-processor.ts
 create mode 100644 wavefront/client/src/types/pipeline.ts
 create mode 100644 wavefront/client/src/types/stt-config.ts
 create mode 100644 wavefront/client/src/types/telephony-config.ts
 create mode 100644 wavefront/client/src/types/tool.ts
 create mode 100644 wavefront/client/src/types/tts-config.ts
 create mode 100644 wavefront/client/src/types/user.ts
 create mode 100644 wavefront/client/src/types/voice-agent.ts
 create mode 100644 wavefront/client/src/types/workflow.ts
 create mode 100644 wavefront/client/src/utils/form-validation.ts
 create mode 100644 wavefront/client/src/utils/regex.ts
 create mode 100644 wavefront/client/src/utils/scroll.ts
 create mode 100644 wavefront/client/src/utils/string-formatting.ts
 create mode 100644 wavefront/client/src/vite-env.d.ts
 create mode 100644 wavefront/client/tailwind.config.js
 create mode 100644 wavefront/client/tsconfig.app.json
 create mode 100644 wavefront/client/tsconfig.json
 create mode 100644 wavefront/client/tsconfig.node.json
 create mode 100644 wavefront/client/vite.config.ts

diff --git a/wavefront/client/README.md b/wavefront/client/README.md
new file mode 100644
index 00000000..fc4162c9
--- /dev/null
+++ b/wavefront/client/README.md
@@ -0,0 +1,54 @@
+# React + TypeScript + Vite
+
+This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
+
+Currently, two official plugins are available:
+
+- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) for Fast Refresh
+- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
+
+## Expanding the ESLint configuration
+
+If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules:
+
+```js
+export default tseslint.config({
+  extends: [
+    // Remove ...tseslint.configs.recommended and replace with this
+    ...tseslint.configs.recommendedTypeChecked,
+    // Alternatively, use this for stricter rules
+    ...tseslint.configs.strictTypeChecked,
+    // Optionally, add this for stylistic rules
+    ...tseslint.configs.stylisticTypeChecked,
+  ],
+  languageOptions: {
+    // other options...
+    parserOptions: {
+      project: ['./tsconfig.node.json', './tsconfig.app.json'],
+      tsconfigRootDir: import.meta.dirname,
+    },
+  },
+});
+```
+
+You can also install [eslint-plugin-react-x](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-x) and [eslint-plugin-react-dom](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-dom) for React-specific lint rules:
+
+```js
+// eslint.config.js
+import reactX from 'eslint-plugin-react-x';
+import reactDom from 'eslint-plugin-react-dom';
+
+export default tseslint.config({
+  plugins: {
+    // Add the react-x and react-dom plugins
+    'react-x': reactX,
+    'react-dom': reactDom,
+  },
+  rules: {
+    // other rules...
+    // Enable its recommended typescript rules
+    ...reactX.configs['recommended-typescript'].rules,
+    ...reactDom.configs.recommended.rules,
+  },
+});
+```
diff --git a/wavefront/client/components.json b/wavefront/client/components.json
new file mode 100644
index 00000000..c097396a
--- /dev/null
+++ b/wavefront/client/components.json
@@ -0,0 +1,22 @@
+{
+  "$schema": "https://ui.shadcn.com/schema.json",
+  "style": "new-york",
+  "rsc": false,
+  "tsx": true,
+  "tailwind": {
+    "config": "tailwind.config.js",
+    "css": "src/index.css",
+    "baseColor": "neutral",
+    "cssVariables": true,
+    "prefix": ""
+  },
+  "iconLibrary": "lucide",
+  "aliases": {
+    "components": "@app/components",
+    "utils": "@app/lib/utils",
+    "ui": "@app/components/ui",
+    "lib": "@app/lib",
+    "hooks": "@app/hooks"
+  },
+  "registries": {}
+}
diff --git a/wavefront/client/eslint.config.js b/wavefront/client/eslint.config.js
new file mode 100644
index 00000000..fa9de8ac
--- /dev/null
+++ b/wavefront/client/eslint.config.js
@@ -0,0 +1,28 @@
+import js from '@eslint/js';
+import globals from 'globals';
+import reactHooks from 'eslint-plugin-react-hooks';
+import reactRefresh from 'eslint-plugin-react-refresh';
+import tseslint from 'typescript-eslint';
+import prettier from 'eslint-plugin-prettier';
+
+export default tseslint.config(
+  { ignores: ['dist'] },
+  {
+    extends: [js.configs.recommended, ...tseslint.configs.recommended],
+    files: ['**/*.{ts,tsx}'],
+    languageOptions: {
+      ecmaVersion: 2020,
+      globals: globals.browser,
+    },
+    plugins: {
+      'react-hooks': reactHooks,
+      'react-refresh': reactRefresh,
+      prettier: prettier,
+    },
+    rules: {
+      ...reactHooks.configs.recommended.rules,
+      'react-refresh/only-export-components': ['warn', { allowConstantExport: true }],
+      'prettier/prettier': 'error',
+    },
+  }
+);
diff --git a/wavefront/client/package.json b/wavefront/client/package.json
new file mode 100644
index 00000000..3d1e33a1
--- /dev/null
+++ b/wavefront/client/package.json
@@ -0,0 +1,67 @@
+{
+  "name": "wavefront-web-client",
+  "private": true,
+  "version": "0.0.1",
+  "type": "module",
+  "scripts": {
+    "dev": "vite",
+    "build": "tsc -b && vite build",
+    "lint": "eslint .",
+    "preview": "vite preview"
+  },
+  "dependencies": {
+    "@hookform/resolvers": "^5.0.1",
+    "@radix-ui/react-alert-dialog": "^1.1.15",
+    "@radix-ui/react-checkbox": "^1.3.3",
+    "@radix-ui/react-dialog": "^1.1.15",
+    "@radix-ui/react-dropdown-menu": "^2.1.16",
+    "@radix-ui/react-label": "^2.1.7",
+    "@radix-ui/react-popover": "^1.1.15",
+    "@radix-ui/react-select": "^2.2.6",
+    "@radix-ui/react-separator": "^1.1.8",
+    "@radix-ui/react-slider": "^1.3.6",
+    "@radix-ui/react-slot": "^1.2.4",
+    "@radix-ui/react-switch": "^1.2.6",
+    "@radix-ui/react-tabs": "^1.1.13",
+    "@radix-ui/react-tooltip": "^1.2.8",
+    "@tailwindcss/vite": "^4.1.17",
"@tanstack/react-query": "^5.62.7", + "@tanstack/react-table": "^8.21.3", + "@uiw/codemirror-extensions-langs": "^4.25.3", + "@uiw/react-codemirror": "^4.25.3", + "axios": "^1.9.0", + "class-variance-authority": "^0.7.1", + "clsx": "^2.1.1", + "cmdk": "^1.1.1", + "dayjs": "^1.11.13", + "js-yaml": "^4.1.0", + "lucide-react": "^0.507.0", + "react": "^19.0.0", + "react-dom": "^19.0.0", + "react-hook-form": "^7.56.1", + "react-router": "^7.5.3", + "tailwind-merge": "^3.4.0", + "tailwindcss-animate": "^1.0.7", + "zod": "^3.24.3", + "zustand": "^5.0.2" + }, + "devDependencies": { + "@eslint/js": "^9.22.0", + "@types/js-yaml": "^4.0.9", + "@types/node": "^24.10.1", + "@types/react": "^19.0.10", + "@types/react-dom": "^19.0.4", + "@vitejs/plugin-react-swc": "^3.8.0", + "autoprefixer": "^10.4.20", + "eslint": "^9.22.0", + "eslint-plugin-react-hooks": "^5.2.0", + "eslint-plugin-react-refresh": "^0.4.19", + "globals": "^16.0.0", + "postcss": "^8.4.47", + "tailwindcss": "^4.1.17", + "typescript": "~5.7.2", + "typescript-eslint": "^8.26.1", + "vite": "^6.3.1" + }, + "packageManager": "pnpm@10.13.1+sha512.37ebf1a5c7a30d5fabe0c5df44ee8da4c965ca0c5af3dbab28c3a1681b70a256218d05c81c9c0dcf767ef6b8551eb5b960042b9ed4300c59242336377e01cfad" +} \ No newline at end of file diff --git a/wavefront/client/postcss.config.js b/wavefront/client/postcss.config.js new file mode 100644 index 00000000..7738160a --- /dev/null +++ b/wavefront/client/postcss.config.js @@ -0,0 +1,5 @@ +export default { + plugins: { + autoprefixer: {}, + }, +}; diff --git a/wavefront/client/public/background.webp b/wavefront/client/public/background.webp new file mode 100644 index 0000000000000000000000000000000000000000..ce52696dd9c71b2ce70ce05c3988274fab3a77f3 GIT binary patch literal 62706 zcmeFZWmuJK*EY(eL%O6pq`O;?kcLUO(n>2L4WfW_cM6k6KoF#r?nWe~yJgaI-xFPH zJrA$MEF{g z{MQ?;u-g+F*u_CB?DyZZVYgSq{{=Z`bY(xYgUT7>Z`VjLSc!jtKWy)2(FNlGqMj+{u{Hj`_s2>5$|H(7|K5M`xQfc(*=)L}%Rt$Uj{@|VbpPa0zMh4W! z{t)eFsl1+YcTW<*!+FE7h(2usJat0|?9<$p1$@KbC)hNIVN3k4U;H0i_3(!_EyPT8j#}~{wHbBoeIdW9b z@Yb@@b{nomb(2sDIU8bUbPWGDeX))9rm%)KLH(`9>ECB*JfHMw(HX8#&o!`wJIWpr z`5WK7^Sg`@=8hcv+ssY5Jz~YH1a=1LxD7l0RKPbI7aM`Dhi{@%|Hmfych6^vS;EgDPFQ03k@Lgy;{U1!LNsuuRw4$keb=y;kh+}Dv2v!Tdk~!e z;^zNw0{COr-Pk@ZE`12HHzOPMxE*IrjO$D|Hsf}Qqoseq|wb-0{Y77${hir2~pfAkOy3{ zd6lFuEy4@=W|M%VoGC=;QrCP^_X6&)|5-Hr6END(s<=)!;Rz2|Z^4U*X<>8Jp$TH} z$rb)MI23N9dCzD?TvOYDU9i}EEY)lQtPIIv&I?ey^w>7cfVu9|`4o8qHuEA6rYNCp z{{!fNFN%c>l6|8FFJF!c{%z|2?4pyNTn4-+)W0F>ga6@4Mz2p>SO_2PUkBNx9Nt-) zE>V&kyG6MVQ_nyrPZnupYcX39#7IEZ@LnWwxbO}`TjG+ybwBqSke&ehh-hFE92BA! 
z3-y@q5STu%Zi&kte@I~jdS!;hq(;7@h%?{;zDlc05UoOJV;2<-O}b(}(Ln&7G^T6) zX?*rHs~xgAm`BoKU6NaLor@er-nmRjGO(9@rzE~q4EJ3v2b=)pC1{mLd~bvYOet7^ z1_FXK3z%-A+R;xFl!uUPwY&e#L-oZT^dh8QH2Ucge zNI*dy7GtVwDUrsn$Up!98fbJiq)+|eTr1KIDu-me>W60u5vqba4dbS`E_q;=0S5krp$_0MhR?>bkoN~6&I*Bh%>;V zbo42*NxG%m)5IX+XoGOP3?Osc3Of6dNCt=UFEAmL+)c=4bJsQke-_>l`YnYSHY0i) z&;r*^a+@zl!}*ZgPbM9hEAXyEdRGd!kN zYb8E30z>@EZMT$w1%(CBK{E)hiyI^4*vz| zx^Oy#I=~Br&J5}sEj0wLy?A?)7&4(~vYfJ70Ncv|AO>L*qATQ~LnRT7tV;c30_P9h z=a+c!ZzNE3GFMwiJWkco8b=z=G*tD~w2yl;vI9N338)Y72e}vG&=4>z5M(2JKZjwU zF^aZ{-Q|EYY95k!!jcnMvhE6OQ~|(6XceUH!)nzErp`TOEy5Q8*Ho zYL9yIVon?9}@grDa^P@4Rq6;FB=~u>!=1bHEi+?3t?FT()mfFOh- z*d2(06;M&pr8UnU8#99*YrW^E-S8L0`18v5jWHevF|~_o>gFbRa(}E?;x364L*mJ} z36ofmqTPZm-4!cAOWuJJr~#vfz4SyW8!$jS)6E(xLm9HR&2yYI!YJ1PIUsF!tDe?@ zaqnncSmW_U9$7N>r>#$3115>JL?xX8l1EPIHJ8X!KST0`BKbwxg3Nx}-5>)Oa+A!d zLeEAP^!{t%-uM?~{6FJGi;yU8vj)J>z+y03aNFpEb3WwS0s5sB8VaC<0Hoam>md!75PArND^;F{^MPmR=4E2f}ltcmh9ON@s?pX?(E1|M^y3O762^bfAbbmTiia zHWR0JuyBi{4sL-epIQqB5|sxTBq+NC-p;ZN5d;E&P5>9n_36XdP^n*->?#$eNotbA z!LcQSmCPbbG2=Pej#mSD8^rAu@oHpEfV`&Op0gX~naz3UCo!N-HyMP0o~s|dh*7`F zKnOv2+;)3WJuC<|ZRrFOOTYllxRg+ah2ZQyMjJu^J`KT$fV&r zoa2~wukP;mTs@K0#!z&@FRN819^t@zs{1^hL!(yPc**(dYBTszWS7V0Tx9co3_+hY z-9%Z)d)k2oNwO@&^#Bk;XB)#Vh4qH~OTL&B{Rnd5_udGKetThs^8fC;t7kK`r4|ddgwmo#67bP@M`aiug@h-PfYm#J2}$*P`77h1KrxB{JAlAR z^zgr+iO!x|cC&tm#U=9@pBX_takB!*31}j8F=~QV_^l*$pUB1YK@ec51v?&=(wau= OcL)Fg002*&5C8zO@}@Ze literal 0 HcmV?d00001 diff --git a/wavefront/client/public/config.js b/wavefront/client/public/config.js new file mode 100644 index 00000000..d1ad1779 --- /dev/null +++ b/wavefront/client/public/config.js @@ -0,0 +1,4 @@ +window.__APP_CONFIG__ = { + BASE_URL: 'https://console.staging.rootflo.ai/floconsole', + APP_ENV: 'staging', +}; diff --git a/wavefront/client/public/font/SFPRODISPLAYMEDIUM.OTF b/wavefront/client/public/font/SFPRODISPLAYMEDIUM.OTF new file mode 100644 index 0000000000000000000000000000000000000000..b2f7daca100957c47773fb6655226f57162811c7 GIT binary patch literal 335512 zcmeFa2YeOP*7v{m)P&v?R6HJfJ3u-KSV(em(%VULQcxkioRggNW<$k}9l6*L1iM$U zH$*Jg-V4`)ir9NsY~=l|y$7%6>V18#&+|V2&;9&~-}T!wXU)tmYi9QBDdra!=hFyU zPCC7q>hrnZuG#-4qNm;?&Fn}o$S*p2b^ZmUy>KXLnG4g4N>WaZnmnCo(JrD{QTavl zW4aEz^hzReI}t6b>Ik(A%+l6kx)swI)ylLPzo}F_|`U znZ#6jTy827GlzhofR`E$Mu3rE6c`P7EqepB7`A~X1~FhU;AXA?r-E~+j>;$p^W$hX zJSSl3NHPX?5Bxar{lFIk9}K+3e{>xkN1fC_hhk|T9SHBaG!6dyVu`)6^*vQ^0kzOF zDy9th#L!H5?1Q!aIPm4bCj;*eY#(@D{b9@di7cn&CXE&-9_F#xPrt@V3yRc#bkZ8_ zV~lswI*l}{oU}p1j5E0o4X%{{P-%gZ2b{EqT55OFI!b%AlQvM|ZBE*x$zrsVwrIMD zb<#E!i*hF&LF2?;4AV(#6k|j=X`Ng~o0B$Zf^i{TMQ+q|6h%`!+C-%mU|uq{;x9zC zNN1u|s%f5_8^Sy{>Z6hB(Vm^uA*X7gwV2xtTLaG`w6Ys*-A-Ys7v3FmZI#qVxzeYR zx~TbI`niAQxd`itrdX^y3R{T9IykR9cTb+@Vw%ek3;kEy|J8Z}we1~^;TCsv?7XO& z*xHyVG&>ih16dBE*uS+=Ub!mTuM0N!R~as%O1ZobYvWY4tb}S=AzU49%ypvz`PUk3 zvl-e>LCgzdr~2&%_^n3yqw~12I?)IVX+HjX6M8!c+gM#_ODJ;T~tL)Sgu;m?Zofg z8fs0QnBR`N;8s$tvFDdP_0@>wDzOH&gEUe;w$3?V?@rV$wR81GIksO zh1;9wcK-35r3h!q#`=cNxgCu)wKeXJ#$I=2OO3mwv85IpR)Mrl>>YLyet%IJB!wx@FzfW2y>-KqvL( zx74~jI>YU?eeO!+3Yo;JI`B(a)*5fBk`MIG@gBMu7&W1mdMBNQt_`Pgb`t2-w1%MH z;)J&mo5xve*oEk0P<5rJy2Df5=c%dm)YN)vTRgQLp1MX)UAw0N|8;p9Ydnn|p2kj3 zsMFKj;c4#lw1hmZ_^;E`9`nZbX0k2tGYb3A(?|PXQH;#Q`;r8(irm8g*iucU7*iXU+Jl@ z@zjSr^&LH)`mTOYeYeakR^nv1-pO-QbG0W}+3X2o8^PL2Pq5Jw40?j07Ech)!Hxz` zu*VbZ^MtCnc%iN^z*M)VxwhTYTvzL9uE&23p5~yZxn)oBt2`|=TnQ~Lo|e8APq^6= z?r>_Pt;^Hi>}hY|s%a0`dD=1Ega1%Vl?|ZH(^1vx>8M4zUe;a6)6wGTKt1A@cCJd4 zsIxZg>8$s32Dx6lf~ZCZx5(lu%4n zY+^x5Ur$SZNqtIkV{30-Ph4+mZ*qHPMp|jIHz`;b@CS z{Y|N{;p%jMR9{waLuW~I&HRABytberH!-a*97ry%PEJlr%gakijLI#n4(HF$4d)m4 
zw8u3hR~PsTOEbI;Ie}nyu&^DkCA#U)fU<7hRVTmFX>OjPplzdK;1&^6G1H zQX4Zmeg3iq>B;rkNyQyCnGH?#eqTdc*jw)N&d(}L^F{^JdXv+#5=uJby3+cS@;hV7 zDg!+keZicJ;+~4M-twCC{l#^_Hdf71pHoWp)%7)fXj3 zR|HC1lC$Fbb8GXX=XBII=eB1SheOfx;|g;Na~jiYz2zCni9v5s$^4Xt>Z16RxZJwN zj>PmL--5z~n&QIf=&bzayowTURAy#>buh!9nU~&G(vsB^)s~#x(~{d;-k9B#oLt?M zSiB&yy1ck4ry-*)JFzaMcR@izTw-mEFT1uRBQHO;y~fucE>BC&Y{}~@OiA@N%vsP_ z9-rnd4CmJO6sI@l1{QQDR^<0pN9Uzv^aL7nGx8gIvNL0{!WF)f?9AkjlyGxjusWl# zvmhl{nBCLfoL65`m(^e2@9Sx4Dkv_`%5AJmn4i{BlGdBsn%!5Dkr7opzb(71r?51- zrqvf6msgY4lkCq-N~|p@Nv$pSX2yhDy!DCgiOKaX$*J{8>E61StfoLXwZ5!3yEkS* zS!_~IX>4DpFd?fkP?TF4Thx=6*&mmXk(}wxsn4y>PAQKm>h11IZHVg5D=ug#NKQ_j zQyosuPE8B={CVa69>2deIkv4Wlr<;NP~Fs1Q=ghuloplVQ&O6h6xWc|lvG>b&8+dJ zX8MzRTB|Ce8cNz4(~7G55{t7Fa?3JO>w6dUB{$TyM5p)*J7Uw5bDPusg-uyah2;f3 z;q1)X%mrCZ)h*uM`lQtULSM?9lA?^1l8%BvFzBn#3?&vtMP(=V1QWgfsG_!liaKww zE;}``C#xZ@CoQG7G_E+eKF3?$U0Ud?ifhb@ij8TQQ{9r8pOVtkk?l=R%q}kM@im~$ z8p6>9eLcyQDdm|3J-L|+iVK?43v-iFn|d2+n*+7}lD4>t=7#Rtq|97jN_}~Le&PId zZ+AwDx3JsW-xHrvo0}T%OKK|3h{|2yPwe(4C)K4zH#cRZbfb~yeri`Ar+BknfXKq7UR!>7XH90C!Q0DIm=I7RT<^@vHDngl|g5=!! zIVI(xtcIwp{Njej1aDL0g5q#eQd)I?YEy1hPGL_dJ+-78s}H30XC`FMi7QCS$jS^< zmN#UjRA;0`MOPKJDwYTR6Dk@6Ka)Wj4zVeFF%=F}( zijLya+}@6y>XPF2K!0aZLsv(1VSZgyYhk5tPIO&NO;%}2M`C4SVQh3?W>0Q?W>=X% z98T@Zj7e?C%*~k-&hQmi2h$3JEk18yNpf#}MV3F=o0^{-_7=@)s|fW5!~IEh`EBv_ zrMG?PGMA6U3PC_NlGv_CD__s z-JBfdi_Oa}j!jO=DvRz5#FW*i1oPrcLY>tKWrcZB0q3FKm$D3sCKpV3kl+t}du2K&-td`Ycs$$6Q* zK3`3nZ+@OPtEjTSA*rCgtGO){(~{wjPi*%g`WyY;x?oatoWHWEGS*+4o>Ea=6AJXD zrF6!(rzBS8=l9gq`2sDa#r3`AW$BIn-e7~jvZ1jyI+&8x7AR?Lsi`Or^d<#*duwt2 z2ozWfb84rf=FWQ5Y9N@Fu)@}ql8{Uv#&X`!Ny@>rbfvi-Rkp{DMf z{+gDyQh!<8oaFAp^xmkNhK`nuKuS_!NoG<(Z(3czm(=Xd4`mmH0>uk@<7!G{Tcfl4 zgXJ|%MZucJ#I%&C=*n_mM|`HYp|8>(Uy+&`sH_X-HKo;udaKg2s&gAUy`_QH!qi}N zbBQmtEvKhAT%M2=TUT4}Ey(aC1p?LcvznuO+k9!hjE2Dcyo~a?-2C#Gn%+QtS#sDP zPK&Oti7u?o>P^b=2TS4tfts$Wgo?uaKuv6Epd%qSJ<#b7Bxd_c^71Rn7IdelbXHXd zeZ>VOdGQs&%nDz6O@D2vKPICxZ$WuRNm*8ZUS3Tgt3I`-D-_61DPB;YR8yLs-jvy& zRov=pO;4*&X{?T|@D`_*wzWmo1hP{@sYNk)acTaf*rK!~pRcFW+aBE*%xS2vt!wfZ z`Fql804?HKo+G2lCRg`;#)tqLTWOy?G5qfuuUGKPfRUCBARYoZh0chUidoacfmgY*t}z zaaM9!OQ<)su(zc(BQGY$>zh**9Uq;X8tt!+$t(93re&3feE!6S7+iTAOqG<3nvtCDHLMbDF~biqcSA zYki8pJi4N<#vkRuhW>`!l)~(&;-0d!p1R(g3}4QInyBB`VFA22OMHeP7X!aN6XGF!O$2PB;7 zixP5+Yg*D;bEBg|CD~c=13%cbZ_FEg4nd!_~f4YjG~UloYdq%N@K7$ zsj0j#A-OO*4}BVceR3$Xwj&|6IGB@C7KqM>s_sv#t`BEKr8OK=6gqAGDSLOFzyG7V z)}#$P<(h%znHMy-MPI)C?D&$`XJ*g%I$@+I`OY)Sl3tu$`SON4_W7ypxbg2ycxB_Q zb2~a?i(h^FnbY4m>V_!;2b_3xI<;Nmb^Yar&KvH#d_l0W>)Q7>p0d5a@b23m{%qw- z(?_q?g418B+VV*)ojKzCOT34U-?#e6ACA9kQu|ofvZhtH-Bhx>uk(}zjTebj*LUBZ zzG&Id*DsiI;+njNo>};I&pn0DpI`i0!^6GPZke*}_^-x)I4b7-za3Y${=uI6emLUe z?zXSG9x__TT)X;&H}^`o=j`(g=MBjIBf1iJ_x#PX& zq$O30OM@NV$$y(Z{;#w;bpOYT7gtQ%b>XaqufKlJB`06`!t9FI#&t~kbbIrqX}<7N zC*JbzUZ+1C`&{%JS9!)C@%3H7*|F=_oc`_EZ$A5q@43@v4@BR3#g?z9UG+xy4_C#U z{LT1J_A1SfI^*zx@&^|uUHZQ5o?LbQ5eKBEo|$^s#V7yutPuxoo|EwHti?}FJox3E zUseRK@xOi5tt<16N}4eJ%d&^oZ8{{Of7!XWesWX(RlB}zyLZ*vw_aQK_xC?~eQVW* znx!`!^zEm|#4hf=F!1VFZ}NU|dtW~D$+7pXJ9TmHvL80zK5VX0cHphsCKE=NtK}sM z43E{CiL}rE2F7Q!;+L3bQNizW6s<6SkCn8)(T{a0;(w9eGw;7oGq4?C-io~bE3Sjj z@6&7EPQ%3qv;wq=4+c&ZAMD<)rUzDHn%M##0arLa(1Gi~)tJt9)X=Y_!hbOE#6Qx% zod2)W##EZdn8T6h-@sUmGXIK!>wY!urSaAlTJcNF4OrJ7;eFKO{}#Kk?cc!IjPg2g z-i-G#{cH9eG_8K$PF~{=5_ubZZ=tAv$$b=OUVwW1AI8c=+x-USd>j`X?Bi+9?_nm< zoSzwvW&QxhcXasgVLw5`_Yi^jHdkT){*o@@W0X8z#Gy{&N$uxY5ASEVJyXE@568cZ z;jy$2$0NremRTla4D~46^;K*eb<&Z9KWyH!A~mIc33N-{sCnf4T#sUJ==v7c=* zQz?npYu!$JSr^g-x!lV%*UX_QgE4-fGfacdFj5AQO_Pmc%*!6wVZKfKnP1Zk+}And zcR82N;ot0MY0S^Og!FFO%XpA72V*{f?GD1y=nQL^!gS+zW)ip<97bd9B3fw9q6_5w 
z(`df=5O~{xrw^b>*0ab1FLR&5nh4!ZXGB=gpQz$D*@gMP#r4QbB$k63&w z{5L9$M`)qs6PjZD4%VrZZ{||GSq)f$H3a$WM|oPQTk@!*JlzA9V>`#vSaT=xa1$-G zkA?p$nD-(rwC=*ZL711(2*%ifvf`FQ1TViH+x-pf53sI5m{Td?pNPe}u5rqXG7HqB zV&GwYGcDPJ&2=#fG2&otrqPUj2aPc|;Wr1wa=P&(VgZ=1(G+2+Of~=cg zuV_QL+6JxyJqn(tLjmhjqnk!^T5P6epj2!gI9_bt{T0+GETN`9#Nnu`|1HkPwttHgkoSMENLxouV3f9Q;4EkwbR~3>Gr!2u z{q)!9z(0z1HrDrhe2?EQ=idi*pbl#te7{KVnfKqLZ($pc|1$+z!?aP3LuLiw157RqX#WyiWMz7@hS`tiZ9m3(N0C9Y!*B=(sJbw>iq`YmslM z0|QcbW=vfr5QQ@cdV&2=4bMdPNfn0JLuyK!q^Yzkmbn7@395HaXy&|Udu%GUaXE^pZ>cL+fMj|e*p$_vC_;rALsl!Tf($_oMhI0HiUd;bz@W6t> z+ysmW=SjJKFV_){T>G8toE)Y0+q`H05rpObIM5ws$*n^r`urA5C?N7SIt zZlS-<`(^~SM?OR=>_*Hx1m}bin7@*iNxg>JjMr$ra~|7jEJdCx!M)%cssU|agp*zY zoeye323QPA!G$0MMmey*1=H23OB$PS9p##c1=;*P~>p%}U6ZySH zi=uPDD&B_f!+6MRv_`U)VxULjTyQ93kEg>xo%R-GXkX9_eLSx9e6(1fPCj!iErqYw zx`Gzj({aql^@ec;oo%i_8%`jvc?zZA7?Y(vP4hK_4$+RF8QN-^rCo>T;a1aP?OMz) zMPFz$^g4=`e$UYoZ5vf;FH*I*f%3t;Kf63#_0yYwb{&6qd7R%f&Ce;*Dy3Q01Y9S* z2R75mh_&C1OZ@h4$0OSLzZx57$~Nq#N$4a0zb6sLr(bd{_VwRmEqs5EYiSmqoosWM z^NaMJdH;QSF}4FR#*p=Yfqj0T7iglm8`mFqQ=Pbb;0Cb2n#LIB-IB+k+rU=G2RiUL zcnH&Zj!w|trMdrL;J$yPe>wkOr}gu(j{}^q+I|cDJe2uY3_SX)>5FM9#**58iE%U5 z^+)&&_4vQVkJ$Eau{ZL*2hLjzrhm=sLDTB@18~g#gG3w*Up$rlFPV+L4WHxxhcVO8 zcE17ojFa|YWzf9e1LyO3KU08Z{s^DO74kV*dS9WR9a)YhQ}hM^UU;HL%I(qgYt=t!#$_YjW8waD>+mAMT4 zuwgjYI_TfwdScJ?OW6J%zW+zs97cVNQRd)x^mgb;Gy&s;@#w#wZd^qkTr+v3ehdZX z%~OcoJV!2r#fRj;Pf8W;J1&! z*{G8o_@9OJdl*YaJq-re!y_d7!|z9$XLJ4Dg#PVs!DRus?&%v0uBBrKqhCg|&9jl$ zI_k4yu}q;HV;zlg*3rfmtZNTr5C_crG{s6pjGlmHW|1#q6SXnu%e6(sV;{l3!(d-3 z2lZeaSPFWf>EFpi~uQM7)Wy156bJ|bxZ-tAR6#G_5zbZ zi*Xn&ppWzb z;C}s6;OBn5`853T4h+`Uiup6ao__sI{UEwm`;0<*sm$*%Yc<}z_?>>e_=qaOQ8@1O z>1cnX`@|{KsC`V0dVuOM?eW9L6ZQA#1n4B_vGBb||BU_uuEzAkV59yqU8S!Q zsm2(wn13@~p@;P4VicBHq`xP!z~Mv#mje@TV_gRJlW%3w?nj__2zU1l+H)9}+&1tX ze2&8OItRROHqa079@#(==3)A0UO;*eyq-Ngj5O|?|0c&_{tdqd7g#hf4+Js)FW^R` zk%xgaq_Ixk_U=2tEl6Jn;J^DkqMzP%=JR&3%pBpmVEP`w>)NB7 z-ox`qGd#Tk;Qh9L;!e!tZT*b?nOFrDpsw?v(K7v+m7#1Z%?DIe+|I2C{fp&xHcnk0cNxV{NptLZmkiS)*3nyRD(DEp<*I_`vvkh{=L9~ z)4!M=co@^^|DkZ*{3r6C)n1p&`1{Xmr|)sERPmJcy@<3Eghu?z!w~KAk)ufW`f`eC zTy3$nm|+VB2yCH^vkCTytLCwN6?9MCY?_k>3X`8{zgyJEA$S1 zjvVurCXh}e@NVZ+yl%FT(x`xz;MJoxys>%`okbVWWweRzq6g_2+D`A%7xWX}1+|hZ z(%f`ts=wGx=j7(+xar}%9Iu;p`}1?%BCgn)mENN-X}6Oj6Lqp5O~+e?2U0ru=`g$u)Ur z&k@G=@YXU;BC;G7-guozGw>GTL3p*Phz=)yk?9z`e{>FAOjpuPw1pm_=jnC&fWGFY zluOw(hW1C9V(DPY#QUyG@hVyuEvM7yTv|(4(am%ZJxVXo4*HNVmgy`NL1W29vnY-Z zp)4w)Wq4h%n~tTGw2CgFtLYZnN{`{4)HmoO`WBI+wuN_1=>WW|8c&NU8}D!NTU$MN zANO=RkJiyObSqw^d7S=EJLzNEC3I)0A+#5{X$~deP0$=F#Yg2(vn-quO#yJ6&3jv@y~qN_()hUTL$WEs!=K?V)(ztkbHJwn5q! 
zX*;Fumv*_dCrZ0g+H<5`BkkJi%8puVqqJ8`yGhzxrQIy;ebPQG?UT|zFYU|H?x^mL ziniX7_9JP(kam}}yV=@?v_qsFCGFnQPLy^sY>Yiq+IiB(NxM+m#nSqu&5_nGZJD%( zOIrgQYd1;TDs8v4M@oB~v?oh@hP11sy+GP^(q0Z5XJ0GrjndvO?G|YtkoHk&pO$u; zwA-cKDeZf(@%AUuekJYqY$HfoOWI-5j*)hPv@U6}{drLb}+R2@W zlVLNZohNOavv_%oKjaKc~v_;;<2-tP$X zHu=jT|MMwzCM-K@ z*wxZ*lynJw6mq1Dy>V}3DS;{c9^uIV8;#{BCWx8=x%9uN&AJg zA4&U;v^%7IS=#5>4&!`|lldGsjPp5e80T}`FwW;Vna^><4wmUeX=9}INIRXaYz6-6 zkC5N~`FAaUGl;Jf|9u-Nm;HC`0l1FIz!hT|u74VErP+(?rd7B?+lZ@@n{j3L0Ip`9 z$93adxC;7;e#EuOFfmR{6jOyq#0hRa7}>*y*+H>rIf@^NQ+o1-r}y3^6O9OX#BdWN(+ z+Uh7rFV7D-+TyfY@~oOa<}=d9j=;QE=xyyi?Q;@ihv65t8us&X zNE^5-reWlOQ*%huP9|;4H~8jKr3i}^;#9p!@6fyT75b_AGy1dodHOp2O8s{IKK(iU zW&IWXRsB8vUHx0@LhEYlF5LUj#^C!`dV}7scj>q4TQU8Gc*#7-JlH(MTx=$rDQ2qa zHPcL=nQms7nP!%mZRVJ{W}caE7MO*m-z+kV%@Q+UmYQW|xmjT@F%LBlGY>bHn#;^e zv&yVCYs^}+&a5{Z%to`x44NUc*=#YxW~@)k#Bg`Ysqs*ht zW6b5|vF35+@#YHi1oK4mB=cnR6!TQ`G;^hSx_O3qrg@fmwt0?uuDQxQ&s=T(#av^a zZ(d+tXkKJqY_2sgG1r;v%}dP<=0@`}^K$bF^Gfq7^J?=N^KNsCd5^i(yw|+Xyx)Al z{G0ip`H=ar`H1qqpZ=^7;CIG&f3e` z+Zu08u=cU`wf3_nTKiir>j2AbO|m9iQ>>}hG;6vw!CwoalvQKC9n4!aCAA$~xLQ##(M2YaM4DZ>_LSvQD;6u}-y4 zv(C28vCg&5vsPIbSQl9rTbEncST|TVT6b7?TK8HHS`S;#S+85)TW?rzS?^l!TOU|o zT3=batRL*DwqcvLW!v@$d!#+e9&PVqyX^7S6V{Vjtlv19ExJKj#P7ubpBg?5s? z&|YL8Xdh%BY#(ATwv+7?JJt5uX|~T!w=?WaJIl_tbL?C@&(60C>_Xdb7um&jiCt=! z+2wYHy~IA$KFmJcUTQD1EA1+~#;&#N?0UPwZnT^1pdGTC?G`(1x7uxXyWL@T+Ff?H z-DCIKeRjWngngualzp^)jJ@1G);`WY-dzSkInP*c&NrjXXfwu)HRH^9Gr?S7CYnj+ zLUWO^!8{PxI%4=1@T5}oI$UzzR!rXsu9iIwHE%VhF9+BDW8Mh#Ux%MVg7xQR(c2h) zH0E(H+gaa;d6UJYCk(zczB0ZxzA?Tv zb{XFp-y1&|KbpeS@CnY{#sF!<-=UGFZW^X(TBdDAn33iXbErAY9Bqy z>)C0%X}o2;ZMD4{R>srU_4aX~4di4zQ!naIiP3&~MQlwY! zAg}8juQwg9RO!_>$m@E?>n+F2E4}&$d2MpM-gdmwq}LIHyl!y3-f_Hq((A}UUN<^k z?>b)T((9-}UN<>j?>Sx>((C9!UN<{l_;Ra^iA?Er%pk8@9Ip=?uPo`ce2~|zj@O5d zSGM#zc97R?j@L(ySB~^LZjjgQj@QSISFZFrevsE4ju(z-GWU7XYsDb1I~}i29j|=p zb;2O8yBx329IpcDb>bke&5qaSj#r`dI%$yC-Hz86j+bA0;Y##Zd)gMq>r2P0NP3+z z$m<@*>nq2rSbCj0$ZMvj=%S z^@F^gal9ZRAkgS?(|ydoU0 z2I;kSkk|8$SES?BD7`Kjy2lP=Zxo#7mRJj-;EcI zmyDNDD2@^*=#6?%@6?y; zC+Mf4H*v0hp}s-CMZZUXN`GG8uD_GS&x;O!Unc_73 zWr@@Amo3i0Uk-XbYtcVkC)Q()8}U~luE1ZRxJq1&Eo>4uh+=UI{sQ7Q{FREW=p~hj z$HbH3F!4P8D#c6qs}ei#S1sNYZ;2Z5t@uvV8T%RgiAKX^xI~jN$(SU9#x!G^2pKbt znWEX4ZOj%e#$02r2pcg*o@g}+j3wf9qs2H?oNt_MTqtfeHX65xdyTt|E#f)jG2=1u zg1NUjL2N^fED+yoxQ~UtS{J#r9&HUBu3f5qL(8>Yx|?3mC+qWQw;rYEilKVGULt1c zrTP*Pr5~mrF5>lNdKI3Ttk>(sLbOaHo>L9#&Ei0`P^UOV@6mhlB;Q7|8^s7WRTJsi>qu-(5A#(N2`erS(>6Nr7Bv}18mCf(afxvWjW@2uGvpJDTktiOsm2yO>phK^ z!4pmzX@(!~t`n{rmU3Sj9e?aqcuL5LQ9O4Prk#k?$7Q5Wl93uOBXyyS)I(&XE|!s+ zA|o|bMygjvs!v90hK$rq8L3$^QnO{G=Ez9Rm64h+Beg(AYN3qOA{nVAGExIFQcGo| z9x5YsnT*s*8L3q=QmbX8;@FKcEyKGege#zClu5(&mnvh2(Mdy$Yw;}hFylJBAv_$v zacy{3>+z5!Mm%k;;1mn_yX^k?j_f}6K|aouKGTAYKuqw4WrChV$@-q zI9eD117`-@k*(sr9D8*F{;vdCBAdpDo#HJTr=5lP9?$JfZX*iMsZT=g{CJu@fOq1j zU`=Icqa|pQX(+`qyctr1_07QA>u9FYh;7V5&Ys0vA3Kemct`e2<4e3zp_@A1DY+1D zd(XpIXqXP+u})fy5y_|U1fYh?PPvE4{FPx%oX;g#mxep&zmre7w()XpGv(T5$+gYF zGP*1S$udOBGH~SBD91A7ZKv_2soUk46Nf0myWBY9#p7wfLOerQg*yo+;wiQB@I2rK zJVW;w?$B(*^JX84A!0c0&5RdZ8%0L3QG)s?MQxND707AWXfyf|#aE$)&~nBmcBqN=$^$!ul+&+ANi|=jnUCya0J%0^fs}n7ag5o|Vrups_%|`_A zKmAJ>w*xOXNr5-e2_=yjcorPbI7)awlk3L%@s5t z2XwXqdnDlrJBfkk((&Y`#JC7LS796r#k1{@5}tCDh~dyOh2XoUoNplbsf!AQ;26OXl))Dz2v3+wjANjO zDe$%#;dxMraW{0S!nhNl7OM)+;pr zy+NThLmL(7*WiEdqre?K!qcM??Nn$;p`8WgWq@`mv_+wDKEn$6(e+lOf%XEFV+cn* zynRg=8mLV5oJ+9y!1BcSbqa=ie&6;2rSlIZ(FdlmX*XrDsoeXC!gM?sHJ7`z6>VP-32xDgw<7VhOMFekay~5zUbJ>6q3*DeF;-MQAcq@{t|xF33h!nhI2 
z+=MSmnouqqusOfHE}&;XZ&er%LvK?UthXy{ERUzSC8hzrQ(^oKdY1xk;}hF%pdqFwhz(Cv6R&X!iyxr$863S&@h61h&#s<7SCK5cT5Hq0{9sDVBKo&UhmFv` zEAW*P!u5jUGU!VVA37`P>2Cz%l0s=uU;k zb^E456VSI5;wR|a3NZkEN1-o(zN^rhxDLNiXdHuF&abd6VwApCXg5K>QD_|3-zp4F?^0+S*Moz`<^55iL-2NhM4tiOtchTKpj7;SH;ra(WBN8}|MVihCd z67CBK)H}mhE5cInxn0-_4fTNMMO0Idtk^$7S}B)AQLb|RGP9q@Tf%u#5^Lp=&U z2lBqhXv?AV6tWjTA8DYS1dUSgIax$23|@}gDHg{--afYtz}qq;;uYE{&;$kE?jW&1 zp`8g$Qs7Mt5(^dh0vQR8CxACd2+u1?wAE0KCxCZENN@}R?Jv+|1@48ANKxQwg9Nu3 zz&kG_I6eU0A|#Qf;PW58$|BLOho&p=E(wVYg?248N5RK>k*m-ihjOd{KIe#hg?2x* zK*8rEQK-=FgZdq~9f}k>mz!e<@bOr1+yL!$Xh6ZoW5IC)w4G3n6~M=PQLfNlhE^zO z2Xu)7Z`_jL7y^8(6}%4s?L+9{3O<&Lr3&qB=rRQ#?*+#h(B6VpDfpNwI1T_tj7Zcd z_?Rkc724O(It3qRMZE&!8zdSOd~6bp3Vj%~$pKjv9G5^J4h<>z_$QhbdIXeXn$bX5 zp^t>}eggQIEqH$b+K&__W#9mYbt6uKMQ?cjp;D0C0B*I^>G&tWpOU!g}s zk8qd)JyM}3K#y{m0zF!x$3V|?;5?tBFcv`9DY#D|)+@B9pw~MX&`k<`H1q-R2=+(L z^P>Q9YD|N2tN?yiSUjoF*q_%0xW6o(R%o2ZXB6Du5*(L6+X{V7!RI~kyh7UzZB|I?RH;sn8Rl?NpMU9jpOt^1>Tk;!7&ZAl~9gjKwF`_{{W5a z^&LmmhGSnEODChWzj{492#I`%IF(pqR<<)8|}A66^~a+JGU%9{0B-*-%SC z$xz#&1d29b_?k?Mbm)g7-;BJD8j3V?92lm+`9rXSoX438zNXh^IV^*s{TRL$(dIZj2=yrVxI{&lpEF*fR-kDH8r zH*`ORjdJKy6!sg?sbCs>-h@t9*gK&!6gFZ{M_V&C%FK1GqmJ#@p=cw>HpbD!8AbpQJFjK2HXxVxP}|u2gWpL_b3z@}XxcL?QGn zheM%fgY&U0@~U3|)+3z=-2l-3VlfnL$%t&|H45(I=!gUTdQb{BIV^>8ULOGmfkz#d zKp%5B9QwG!GUyWyhe4los6<^LUKs8>7(yYcp@{@Ua$OFd}@(Qk^3aBlzMWiLFq?2*c-RW4MCP|HcS~KwTJ!9Y(B!j#6;{&KM1F zJmS8ZfnyOPmO*hGVz>{5FJno#e}n%kuSB#$CxCt6(*@mE!Tkk3f5>wPV!@cG5T`-+ zSBTT0E(P};4D9oaSPOM4xbJ|krb@&{=wyYs0y;$@(B8&WFb&J1J&oxKaSL>YLfi&L zj2m;{b1&4R5RX9-^9=Vd`1rzbAHkTf;B!7k{v|rsL$pG#hhjfq^hPL-SB%c}6Q|HS zq45fx+af`sFNZEr@Vjq+!;dnO=R&d-KhbVL| z`(lND2Q*osZ-%BQ^n0MG3jKbl7o=gGh<(GS(4T^)D|D_C?5m8v9hwQUk$w%DqtM@n z<|_1$pm_@Y3uwMV=h!Y#=tn{e6?!k!uh0jeMG8FxEp{-VB@P^GIMx_tzy{?2`;-0w z6#K1&poc2-RnWr}I>+AO3jG1-Qic9Bl-n3^KNMd-mgw(5+ZEzsXorH2BL=rI;J%U3 zN2rl3G3O;5V7bwIrP>w&K^Kus{1jjJf z1<-jNYZc;H=p_n1MjPuC;y5VBF5qLf!Q}*e4Pk6h=o_I9g3EfDLT9~P!Tn|)>tyuZ z&?^<(|K)omk|EHm6@r(&Mxoygy;dQ3xxXs(d!g4UxZiAWyD~$;CI$DKdCW^P9D1Wd zu->HL^8k-uGvZe0Ee<1~JPiczkGCoGze8_Vh|N&`4d^dI?^FoRGsgiV0Q&*%zw`YG z$w=rH1^4B7Y+5o3x>do~69$(P2;QgeQ|QM+dH)B3gACg=wW!R^Jd3v`Z6jvK(&&j!Z|z}Lmda1hrZ&2<5EZhtO65Z6LK zSLhrITpl3)3jI={b6k9-5Vu3WR_NRg-za!Y$M{yEw?KC(1aIp*h0gW#y@JPhj2{#_ z*X@rA!DabL!TmCWj|D(**#{KdM>9t$_?p&495CFcG10~hzVIgRk(gsq-iwi*s1V4n zxxa$@H|A7@xCA;)q2C6bt`PS?XDGOTXUh44s8DDIR8tVP zSp&brI+cA_2McXl2B@`9SPyzG+g;oP~D<}Y+q|ln6 zlNH44fm3at}5TR}^qa}-(!l-mhV z7&=#>t$=bn0ph%IodWF`DAzlnlc3x-KsyZ@qrg{?$i;05w9}z+3Vi2?T=5F+3}}La z&V+K@0PS2T#}>fXoyf)U2eehtg$jIQiCl{m+Ii3e6~ya2NTHnrJy?OS5RvN;g|-2@ zSb=X{kt_x0$Kx2RcMz%y$XD7iCi2*K*RCM#c>1ZVkpNe(5{AZ+yK6Z?8;PV z*Fm!sv<{lB(EbX|Q4srZ3<2#fXr6+$K=T#aV^EGKKsQ1Q71}*ezk;rZ7AbhF!d0xG zN1-JO?e9>IKR}N_OBLESXqkeZhn6d}H=rD6fS!XcQD{4$hbrhPD90$^u?-i;F`y@* z9CLujK3vNb^b)jEp}hmGQt*9ISG7WW7h0pB*P*ov?IUQNf?kEzE3^-w4GQ8sG%B?B zp-l?D=jjS6v@f6`1>YleH7m5wpe+i%7wQTtw6CD83ckPUYEx*RLfaL5f6~>V&`oHk zg1&=xDR`X5)vX}TLytlmfc7f*UZtx~!DBS8euWUwBNX}w=#dJ(U+6kYp-+Gwt>F8I zu45GXKG5X~zJKUCR>5OAuHzJ9B=mTNJ`uV?!S@ecCn)p-peHK$exvIog}y)ZWQ76rprjqw92qJ{5X~g6}K3&Q$Q2k?SmlU>`mf06i9ZjzUa_ zo~zK~p{o>P8uUDc9tT~m(6}A=SOBzdp=%WSBpBH--CnQII36}Bh|@PHc#PY1qk=eoZc=C*CpRl-JoFZYHXM4Zf;g^j zQ}CFy>vjb#hTfsjilKKZCKXj{t7D4Y-X!+3l z6toa}ze3A{KA@ljp?_0o1<(f-bP)6*g;ofCSV67OM-&>z{-X*CK_63S$3h=h(6!Jf z6dITNNd-Ss<9bS=@%Eoq5SR5Cg~scCRzcjh&nYxs_IU+AE981Xp>ch0QxLc3-xV6K z^F;-5y}qQ-xEwDlh|BtlLgRIBR}k07s|t;`|C)lI#c{o^&^W(46#QI{i}wehao%?- zi0l4Mg~nxlOTo`vxZYOqShMRL1$_^FSHa`fuJ;uD42A1`h0goP2MT`1!u6p-9|QeJ z!OvE>K33>^K|fLOa~7^o6*}(=pDFmc4A{9J?UYlS`w 
z`i(-kq2DU>dC*-7u|M=Xg+7M}ZsbO}Bd~t=XoZ3Jaia{3aXb{|WQ^mW$P;6%fbuf? zAbmD;Uj>hkxF-S^=C6Tb1MW#k-vynlFiwC@Q5YvdQ69!v3!Sfsm zAPy#icMOk{OUWTq$L?D)@p&gmXQBbrYV<1MSVVw+~K&NT9D|jEAhPGlN zQSNDIBZkLrr|~vGB+5Sx_0AY4LmyS}SkyF>lZil#O+$W}2*mQVcNE5nc(fcb$nd!D zjIoM{8Bnw>6EPjS1RM&V)1ikcc-(i!;R+rPp3x1ChCkYU#&UqXa9zznUT2)6=Ff`5 zKPW7@rWD# zr+)+EcF>`d6h;_2S;6DVG(}Vq*>LuZ0nNOwSID|l>^ z5Kjz`)AD!*V_XD9j57wx%i|dgk4f{`17jQsMST$VHNL;V{Soe~@EA4sK^PuG=6(lb zTnjx|!DGpEh{9-tE>;+Q&}0V#n&QB9km|tY^Ez-IaXgd6LwyeU&~%4FXof>MG}EC9 zn&q$pn(c5RG{<2jG}qxgXr9CQP~HZ!0Tehq4CU={eLMz=9JWD=9d<&o&q+Rn1{7il zv{WI6L(3Fm9F+471mc>HqkyXn1bl1(E$n@%bz^pdiWKpH7D z5fu~_MG!^o6&n^1EPyD26vYA-6bmTLUJ;#y8}OZ(xozY3{=8rB2ZXa{=1e-~$5a0ORs7I077^z{CJPEI>S9+@CSvzVI%<5r8a!_XNOW*!u$D{_z38hXA-w z92EfT{xJ$n0^pAXco6W%0z3=&6M#LfS><(ZHj@N3^<4aey-v*C;k}H z6!7zw38%nz1CF48pQ(6X4FlT`IEn&(W-`#;7_QpfgB*%Lws!+Aroi3{7`GXK>H)wd z6xe3~mr_tY4!Dd0yA^Ob1=S;fD=4tf1Foik$9txR0{cE-tSbO2=!17E)kgyky`xO@ISDNP+VJJS2cO;9&|}2;fBm;6A;W0+#@Ii2!&E zSSmmm;AIrJ6u{RAfcxlj3S0)@YX!i49iAl^xKzOF1i(CRprFnK{3r!Hr@(e|7}%qL zw+f&Jyp00q3HSiOyD;CzJRbo7J-upk4{|sI5DRdY0+$6C<_Q?^I*Iw70*m$Z2MSkh z?m^cy022YU6!7|()d>L4ID9sOfvm?I!Hog>0X|E?K-N*t;Kr~L0Qxxw)eC^3Zxh%J z*o}hfHNfryfG)GpKQZ7rEDQQg;3Qx#3Ri9JK@Mh+&r5(|+#&ELU>H*{;5i7JMuAHN z3}XrgbrRsI6tGLiN4^z->1AKyl?s>pp0DKASsTTo%ML`F1-K#eD zAQR9}ENc#AtosHqv=0Uyj5YY#fI)X2FtiT_;>OPb0QD}wHww@K_$CTEXkToEt`4fs6@S8eV=4yOV9 z0AOv00k6xi+T4R24uTv;DO|O=2iqVP)<=Y?m-SPpT%dBS8eX$+3E>$xRt_Hn|qMMX3z~RlP4+Qz2U1i z_wY=Cy}PS6_aFyYw_LTkhqW8-7jUkGfzbl~kpc&2j#q8&VXbl}n5G0$xN36`a=`lm zR{+BvC5Edu_aKKiKn~e}-vrnT?s~v*)`H=x&HYuI`~S~v?wLQ$V1Hi;y1EcB+!y_4 z7r37S3^(fi33E!g$4u=E`+Z<@c)dysmdDAeY_R2h9`2mWXDZ;2AtmEFT0W>0cz&V_U5JUIgw!zFR4+zPl8_;zkRcOTrZ@hrEEdzbqV zZoz9;53B#xc*E_3(VAqp0^@zghmE5l&LKV_fg#3_@Q|nwbBHBmLP$x-oREbfOG8$K zTo-a{$Q>ashP)E8J7jOjsZhVrkkGKuyF>2_W5e9Tyut#*jA8L%6=8G2Y!RIiMdWBz;_qH5{>{Pf!o!RSPBP_xaHGa9<}=Z+ z-U7ET?1PlsVHS}CMxXlu9=iPS<%htS>UJ@fjQ;^kiZfO)eD%k)bISo>j}(3v;9tOZ zoxA&7#ks+AU;!onJNx+AN3ieAkuxyL0)FAl<}(Y<)SM|h`_I{%&Mr6$9GsnX>iF3i zr;eU&JoWzBhEs51Qdb~^La*jG-Tvpswq;pRweM+-MhY+$y4 zt{%c~M8cfNf-R!B47iPY5wzU_{QVI^{}OI<`Hq7W#CH+mi~+#Y5PvZZ|07>f^HBT4 z9YQD6r{E^%58?J^kP-$R%&v5=>h{1dJSTMfbqC=0knSBF%tXb1K>b>GPWOZEH{BnY z2KXO>M|79OI5O7#B)h3g_q9yXk-Er!;^N@Gj7@M~Mykda?u@wy-*5uAzPzY@P*W9Cj*emVb?QZM`^;|LQ*g6L20MW*Vk_Bp*2eb0{XMs^ zYv6{E$JwXYZR~OO1p5Vhmi-Ox^6`Tk7{j%KB7LLehO}?*{gd_{kD2LzL`_EnX6F0qkc`dL!(w7R6nipf$>8RcJCS(r`%P} zDxJy^tk>hgQauaCtO9sjV=lan(FGRg52~I8oAPJCmV7gu{X7Y_)xUx5^j}N>lL)u# z@XQ#iVHxH!^Dmdl7O+)pHCw|rvP;=(*yZdBE{A=VeU*KgeTCh@zNCJM-Oi4(JbM{> zel(ZM#i>HThFK4`MWL*pDw@qv#j<&-I5uAu!{(||*)o-dtx%P)Q&g4gOw~kowyG9v zdaYo;+XS|{&0v>18EkN;fW7SuwqMl>cC-uG)v8{0m1+Tdy=s8HS=A0VD#fT{RX4G> zs%~S~sn)W0sy4v)pYLKfsqO|F(+9ww^l`8qeFW@8_kvC6t6=~63cH_)=O(B=X1`G# zXV0iU06Up)*D=*z>B-*>iBe)nBR~!A9~Yu!sDYbAgM)Tva^hz_4Jy_&Yns zaGVF@&4n;VE`bSR!nhPBn#*9q!CGWLdz4Xgo^bb5lsZ~v0DGuT_FB~r_AS+)>=o5N z9H;u5V^xS#t1fa3SoS!;msnj{4sLA9QZ<5g+b!(vs<+@vvHRE$;YO(-b+Bp@J6Cl- z`?SglE~$y*vQ;m$`&1@YuL=aat4y%{Dq@>eHn8dH0sE~N*xjnv*aNC#>`B!}>?zf+ z>?PG0eCzTn_6NAX{!i6(woetrda0_}IjUV?N3bAmNo?CQJ>UKg;-KTq7w_CSIcR=^L?k(Lx-D}W0PwUP?|2(5Rr8}wnPIq2+o@>#) z#FVRd!gH_^?((T)8n_v7=T<%3Lp6n)%1z^DbJMw5+#I-(s+E}sH)kPc8r)1YLhdCR zW!`5#fcuM9FpJ?9u1Db>uE}uc)>LK=+?=!kZcM`Wdcmz?a7WTw7@;19XY6LU3+Y*M z2huBW`_VpTKl3&`h43w1pE6%?ZQMNJ2C~I=caS{>Pn~Ds=BizAW7S@`sS0kXQrt-P zExwluZlxOGmUBzEf4OeBjw%ptqjFF?azAlD!=-k2b8BG~JjNX1zGEKZwlaUK{h4Ll z2IgJvJo7mBI^1d1!_4IRm{Z(krk7jA3~;NN>$pwK_1wM84cvXqO74DU9k-delY5f+ zhP%T2pmt)OvksJ)pU zZY8sjyP3J5_G6ZCcQ8x2bR_-C@A9Vop2=^MZSIsfEaF3|m;Oj||Y_=)_zKRqM z)@uoDp(=?jR;97!suZ?VWoAoM$*fgX3Knf;@cgWRlZ`5Nma2wrRn@Z{sup%W+-EkR 
zn#nFyO=EjibJ!u(Z1^%yAAE&pKJ5PA%-*KD6~3gi2ELlJ4!)4H9=?WiCw%$l4*1H= zJ?s;zP3&gXz3_B?7`{sL5PX5=G4@5(qwr;!7vU>1Z^9R2wzGRxTfutn2z>SBUHHPw zr|fCfC+xSXqhJm9Ay~(K!G5RulKo!w4f~7gBz!^TYxr8qckpGDbL=0gAJ~hkv+#A3 z5l*8T<+Q2`?7ynZoD;)vZgAI{KjXp$Fm9ZIap!^I|*!}xPijDd?~LbxO*n2U#J<0|z^^^I`9+6MIl>eu03;G5JpJ49&w z)Em`ztM5`@ufE_A=@72r)Q8mN>PmHnx>!9?U8>Gk*Qtxt)#@De1a%?YDO{i~Q(M&~ z>I!w9I#->gu2E;h{cR0!Ghv^`U;UOtj66 zU;UB81oc|=9qRS!b#Sp5+*ZXfK8 z{)~x6L7swZy?=$P)W+Z%a2J@WN5OUHWttYvY)z+TP;;GTo#qkE zb8vC?9?iR&qneYN?=}BuS*?rKq)pc5X-l*dwNtb+wez)0v@5l@YwyuM3RgcquYFZ} zQ2U|wYwa2B&)UCrjLuQ#4VOvB=qBhYbxpcq!C}l%sabI(XqvGrsF)v9>*n)D;;lh-01j_<7USf9ba?Y z=lGuE=Z@bx{@{4Qk$2KMc{mxI!kl8A%ucyZR;P(h%}z6&x||j|Ep@uV={BePot|^r z>2%2HbEof}E;&0p2RWOa3!E#Q>zrGhXF7K{_c||eUg^Bn`61`c&d)h-ci!dvw)2P1 zC!EhX|LFXOGjhJ-qH%F?@pdt~M7kunq`Bm~RJhc+w7AT4>2T?Fxz^=Im)l%6xZLOR zsLRtXFS+b-dE4cP%SSFJT)uJn$>pL8@5;J5x_Y=8Tq9i*Tr*trTuWRhx=wPP?%L+s z<=XGM*mZ^LO|G}Q-sO6~>tn9ZxNdjd<$BQdsOx90CtbgH{mu1n*MHr#ZtiZuZV_(r zZmDkBZiQ}DZcT2}-P+u`+=kq)aa--S*6nV$2izWad)956+iPxbx*c&l?smrQg4-2$ zjk}Axw|k&_sC$fimb=xx)_u17T=yRLA@^(CSGwQsewX|G?vJ@YEk9Ryi@i^)6y~ia_C(l67IL~~~63;r%X`Wr4i#>1hyv=i?=fj@Qc)sMh z({r!qe$OMGAA6qk{LS-kFUHH!%hxN+E6FRztKO^4YshQ0*MnZ0yVEnE4^>`e$e|F@9o|@z4v(^_CDr)&ikT| z+Q-i)+Q;mZ?^EGZ>(lJh>eJ&h=(Eb_R-e0l9``ze|5We?)&w|Aqdn z{&)S~z8YUQ-(cTp-!$KB-(24k-xl9i-vQqhzPJ0{?fanb^rnzMuMj z>HCxKML(6Fk6(meqF;tzkzbA9Ourt#rGD4>t?}FB_mJOney{i)@H^)BrQff9m;Ign zz5E0Gqx|Fjv;52a>;0$sclh`E5BV?iztR6L{|EiI_;2&y<^Q(-QUA~VPy7Gm|4)EJ zfL}mZKuSPnKzTrYz>I*o0lfiB0#*d947e@e?tsSvo(^~|;9$Tf0cQgK2)GjH80Z}s z5Evbp8kiGU6j%{BIdDc`dthhaK;ZJgTLU)*J{I_N;I_b51NQ_T3OpKkBJgbB4}li~ zc>`l`Hux9<4UvWfLxv&8P-Lh!v>0X^x(o{qOAIRww;47X9x*&;c-gSWaL91f@P*-= z;b+4I!)TBu$R)@pC_E@JC_Shss4}P_XlhVY}6Uu zjCx~`G29quOfhB|t;Q;2gK>(n-PmIsGG1d`X}r~VH*8?MV0_v5x^chpUE?w1=f=~< zUyR6j#iTL0n7mDarU+BKDb1)$j)32rx)4w53unlAg2@6RGNh4c8bs;Swts(P6mcXXS>X5Y|cZWO> z@?^-?kXJ+A2ssdPG~}z0??V0z84J~gx`paPgF?eY<3iIz3qvP{wuH8a4u)PAx;k_% zY!y8a`grKGp)ZH-4LutAW$5|Pe?l*Zsl%LM^C%!JJS;9OB`hnfAgn5^DQtRJTUb|E zf7s%%6=AoAZ47%b?1``~VLQThhwTeH6834>sj%~5zl8l2b}5_*cL;Y6_YDsYj|h(s zPYurwFAOgauMM9XJ|}!$cyIV{`10^o;cLS048J%0vG8ZYw}!tOzB7Dx_`dLW!jFZ2 z8vaH2x8dK1{~UfHd^G$@ggU|@!a2e%!Yd*mA|xU*A|WC>A}^veVq!#HM03Qnh*=Ts z5%VH?BZeZDMO+tgQ^Xw+_e4A#@l3=^5xXPyN4y{LS;RN65%f1~eK|#XMj9f+BC{f^ zBikaoBKsp3N3MvxDf0HnyCUz4d?fN|*cjUz`A+1gkzYrCANgA(igJkZjS7v5jj}}L zMU_NVM>R%Gi)xQr5H%QeZPegA}tQSU^39QAe7Pf?@MY_wCfS9DNx zWOPz=Ms#6xRdge4tIdmE7=2CjtJKNkIL^h?pNM<0m(Ao_UpH_<;vUx>aOqmA*1 z35K{^#O20W<0i(n z#LbRd5VtgLb=;kC8{;01dpd4M+`+gH;=YVK8~112m3Zg)fcWJ2;`rwH&iL!%Z;8Jr zeslb*@rUD2#Gj7;E&fu1F2Oq?I3YSAJ|QciB%vXpIbl}9yoBL|l?iJT9!hvD;l+e~ z3C9vnCVZE0A%Rc$H_;){CowoNKCw8lIk7WwMdF=_4<$aExI6K1;<3ap6Msw`Nn(7%4ClYU6Ln9L-*CkG~n zCPyZxCg&uVC)X#>N}iWIn0#aM+T{C^wa#$v-3`Gi&xR8_e=ghyFM^m&Z`jmi_u$1(aqLjvzIVpW9OHyt~ zxhdt2lm}9tPI)fn<&-y44yAmY@=eMQDSWC!s$Z%hH9FOtnw?slTAw;KwLf)L>blei zQlCxTnfhMpsnlOn|4CD)`J@G>C8g!0)uhcx>q@&WZA01cQmb)yETDDqtSoT;BTRydXYxzB0o$i|+k)DxW zkY1f$pWdF{m%bwXw)A__pGbc`eMkCh>2IaKpME_3eEMJMV;SxlhKz)ayo|bxIT;Hx zuFJSRV`Iis882tNmGN%ICmH86e$7A`|7N;m24u!&PRK0JoS4~^IVZCtvnO*==Bmtf znGa+>oB2ZK?#x4(M>0Rj{5tc;%)hgkET=60tgx))thB7$tkSH;tT|cTSxd84WZjx| zSJu5*4`)4{qhi%6>2VMD|bFS0=bkFil9CP&Q%4g#HOPPIzF#3lnxtczePJ6F#4CdcyA$#&R5U zd~;%RGINS@CgwEf%+Be~S(bBi&fPf==RA|MJ!enOp`2qmU*??4xsWrKtIhSuHRi_W zrso#qR^~S5&di;kyF7Pw?)u#Ob2sO{n7cFgVD2Zmr*nVK{X6&HJjXomypX)OJWF1F zUPWGW-t4^YyhVA}=dH=RJMYoF7xG@qJDB%r-nV(bQ(d{#*G+@;}S}KL5{rzCcspUJy_aUXWOjSx{2YP%yo~ zR?t^)ZNbe2_Y`a{c(Gt-!Ty5x3qC9Ow&16Ni-l~VS7BtKr7*v+qHt2-jKaBv{e`Ov z?<{EY7POMfW+y>zrpQ|40^R2E&9RF+XzR902iQr1z{SGK(D*0N1ykC#1Pw!7?T+2>{F 
z%Kj-=m%Eqyl}D8)m1mb1m)Ddxm)pww%Wo)OTYi7}8oL&fHb-4*Xud|mNl#Xpr?rB9`yGNLl6vaqtIa!RGGa=7w_ z%5{|wR&J@>Tlqodsmfm~uT;5InX1yO%B!YTbyh8^x}|Dk)zekGt3IqcSM|?Cr-{ai z=@TbTY@N7d;vEy8n)v3#&n8}|cB~GmPOC1fo?6{seNFZ4)lXD!t$w}wX!VcPBQ?4j zQ_X~$s+#(mmYSJ09X0(mD{9u%Y^-^*=Ea)VY7W+XUGsCzg__Y?RjsbptyW(fR2yC! zSDR9sRa;P7R$EisR6D)4t+uOnaqWuQTWiphi%c?7=E32!iYpR=G*H+h6*I&1|ZbjWqb@$di zU-x$1iMn6v*?N6_e0_d>Lw!g6;`-a_AFAJ0|91V6`s4K{>(AH!TK{+b7N);4Txc&OpYhF2O6GE21tPuf4}_@tjFU1{`ejA+bkoY*+CvA=OuTZD_i$>CvVgO^2IKHT~7BYYuKUHk!At9RajZD@F)x2U7bHeeg< z8tiFpAL#2X8i0`2_TeF0QQLrRk!{{U>mplGN8gYlxXsqxx45XiYoL9&XKuG`351R( z(H!I`LtWh+HboMetsLIj*SDaxtq+PKrP5yo8#Fx$8y8h7ysLMSZD7dOK{IG?#g&1; z*7kN=?+|uT9_8d6ZQYa&p=y2e`g&~(io3gdZ6s>3mrko4W~rQqR0jo(R7K$@5>xoa zTook3WT_NjC%{rg8CaC1R7e7%g;5gCWqfg@2BoAeN=XsgQmqixx`HIGkP}yk6~+$Q1QjGd!eNELS4CUvK&2F+Qf!1u zl3S&iIbpAoq{fj+4wZIttfWj<$}*^wBuj~vw9=JHDROz>CL+c(K}*3&N&_s{1$%Jo z;CMneRyf7@gg;CdI0UC1kKm_VfVU4$4oplt;BP4^mJK$*BfX(l*2{l36X0TrCN%m55TjT9UeU90#>h zNwtcS>f~6GC=@hOP@Pyro!C-!M24jNI$96vmNO#}a6-yDjU>dSnGz=J2D@7aJ89z1 zVHlPNhI_hOhllD&gVx!#VV%sNs0l*0q%grP6{)C&#J|iREs`V@GZVP0qs=cMA=l2n zfnF$opjXNYdt`>Nk5(DGXa%v06jLwpR8RbqNYy7v?J-HrlomEgl=39I>QAC+#ke>T zmF*-&b2bWnyHN_oE}}Gz%4i}{jz)M=c%?^QBPmx7A>Mhmfu7c0V018PxV=dhD=Dc- z%)LobBqmaRn*<)4Bmp#$!kb_`Y8LC>tki~P65c$nm}arzh`1#m;ZO2WkxE{gk(iKh z+f3SxI;93}Ci>TG*8nu4ki8&ON^2HMp-9ZV8P7ACsWvrZWgtJ4SxGktO_Ue$$UNXk zgt->_D@L14@|a9GnJkGFyGSi2iwsPb1vW*BHbo4bN^+Sh1z;CRKb4d=RZcvW)@!Q7 z*)&r0G&#mJsVS$4xnKuv?;_!eT2xAs6?OM_w!+K|<{`Er>L_o8sRALD*t!K;Y5!nX zcORa@*%nL_BbEw*O`Rg85=tR)Bu7%p#Mb_PN~~_}Y3rcQnqd;HmZB+L#8=nTg=d7s zT~A4qI{S#Xv1^{dQ&THV)2!gAzLUnb3b=ky%+NNEmPe{N)YscLNU{^EqaX~m${x(T zy-#c3|!tc`8T{0D43LDlAy;vK}o57!&oy( zMn#lyD;A&GQe>&KOI0ceo`}Sfff*{Zl+bFWP}WOn7^w)f4Xy|vT9Hy3B{Q|&PSWYA zmXc+1GnEe5m6?UpLMfKgK|6X85o&r`X_H-MmLhAj7_a%i?NdrQDWf7SBdlAjgiA|? 
zrF5>Cbc)nOL=eSPEM-D!Ytx{8p~V)fTmh1zj5d8SRWNHYsf3mKim{B$mSR!^OE$?g zJvF0gP|l+fCPjjdGn4+ob7Ro0u9qX!ErbR@+HY?Q+f9r*PN`+Oq^MnDb99k> zyToFMEW1cPI5Nqr%TA(Ql-({ODH<-xm=e2a_2BDnWgeuU?xYl&8Q^i*0Y%JE;iOptLCO@Tl(#&wk9N*(vc-{< z0x3XgdLS>_0N6$NrP~E`t3cXczz!O(m*mqsj;mg&++IbgeR3>G&I%e);Xbk4KCxN* zNb?Z(`e?nVTh5F`zzGG`i4qENX{Ln9K6y8%Pq}{YBRbM&ry6}SgQ7msvMD!$TPjjf z3yFW3KguuRQp`-?u8-=MfP`G}!Wzn#cXNnGW=L9i6FRLRrjcU$C7$|;e}GhdKx&Tx zkw044fGA~nSSfbz2WVO`E>1*cJD_OJL7`&~N}NEilQlu0+@uA#3_{a~WCBWfZkZ zixmwdvh9$v zr9_JyPBw|Ur0}kBn?!WYnL_r1(o^W>Njg~;TQcZ!Jd<*elCIc&Af;I8a=(PKmQDFh zBb!9>I*!E5rW++CL|*A;l5cv7kRTJwrAu0ucKwh2qI6-uh_Y8ows9;ObcvltiCM)k zB@*@m7RaKd;vkzrlxMn3&8EW37Ffr8!9GrkZQOnlSw2&#z_t--`}9;AIgRWW*~aY` zmD;2|2bv{iu88g|Sn0~wN|(`TCAi}D%~2{e2U*-(GBA6vUm!F9<(W2zh13A{j7XI9 z6v|X7m2+uvztrA@jkGdRAJS7PD?ow5ZOTm~C`i}1?z z2^7e7SZs*l|FN}1IW40dsf<+3Vx<+$D78sjOY&w8sX-adzl^3XP8*W9mO94kWm*;5zeU0l15o5raD(l zIVq-6E~fPnRG;js!M?Y^2Z@p{4323u&r+I8Cdmc*Ux)ozmqIl=g>0IdQ;33_Q>n(+j5-DU)6p~mgtGeb_R*{KB*L;Ftp3!X^9GpKF z)@5@CT6^1}vZ-dWUkV5|<_G!+r3lvsDA_}{-g&lu+aOFP;J0rA}*3G4x|MV8ZDJ3!9~jv zsguaXlGfL6>um#*D6oUVf=W#r8UQKG>$Y{nSOZd*7^W@@ktG(gbAgBlIH}%6<20|RP4J_(vhZ!rj34@tn*U-{_SgCZic6W5mojW)ThmSZRWZyB| zKGfAagiXS_2VjQQ4g%_1hINaGz5^J?jfydDRDwyPVi#!=;=qlH9k@{mCXI?+v{A8( zG%9uojf$Q9-NS<<0Tvf^k@^xEux#ta%_jRu?TD9T0y*GaE3kp<8NlojOxVLN!X9y8 z_OJu9M=)UzyC{3uMcBg*fj#Ub>|qCG54$LPBv4{c@=^A%mt-Ih??4s;p7hw-O1op6#u!kL#J?x_FkwA$($w%44UV*)SfxUi#J-`Bc z{UUqdpzQSv>;V?o>zCN;7uoBV+3OeCgC0g2cM$rT$Q}eL*b{srd;Np8q`Ndicd-f& zXF2+CJ8{#x2Yge>i`MwTL41`bwi$F zDCyOfG>Fl;w7(OEs$nqj>i`=baLnzRH#}eyJh0P6sdV^(V0kP856LGL=Od0i_9K!a zKshKW$oAtB1mMvF5Ui*0*dY+8h$X7%6|p$DKy0Eu!|k+n01tN$b@g{Ig^-S}MO_^> zA&MBT7>;mJee>{O0oj2^9Av;*lr-30L{{e5L7r{cAuiUGZn`SR6ucZ4v6O_Y;xU!% zXGmGXlhQsRS@Yv~@=D&`CobmgsZu#|GO0wFBo!-@q+JAwH_0L8Nm!P$BfkG8Q}SiE zTSFKnd#PA*;tm9+DwEh{DDFMsNV0h71`g1QF-Y4R*rUivHid-%D+aM1)F>8HJ1b-&HggFa2SE27Y%j$iwvS{{i-{up zrFL8^d`i(N$WmrDK91| zOD?x>@Cpc*?0T_Vl!U^%mqZaZ_XIcHl*1lUoLEQE(w79lP(IjRi9uO@r6^I^3KwFC zl~UT_0$F56w#3D`30ApM8nMQ*F)qX!-`=7*E(F;%i^3pR48kl+jQ}>p_EjKWS_v8= z8584plqnLRXcp?H$1@~1$*@@IQ>3L;qWO!GCqDaX5t5*?#I2|r@yk6;4yF|nDu~Hd z7!=$Nwoe2KwLr!Wu@xjcWK2{J6iRDk2&={*w$6CFWE`gG!jfC+c(Q>qj%u&1)gmj^ zbb2t@UIUS8#Ij_|WlU9+QVafCQSqo5Gp2~0STbqGM5VG)!)8pC#UtvvY~D=56(@0n z?R7w^6ZI6kMRutjGp2|^sih4@+(l{$pJE<7^@k^19D1`C0 z(>M#7NGeH`zQTwa$5NiRV87TO?X9UvO!1Wz>=ET7wLIlm_9|OK!LYKvvN)uQlKO~w z<8tH-rA}>UYfTv&=oPUte|(N&Cx7W+MT{*8M>>~*ka`SC_Ng5bVT~BXl10;POjGhI8E})xc6u!Z*~KRr+Eb+C zKb!~Q(k@)&TVd5b*xm$j$6Iz|5~LLSoYKBq2$HHQT6tr-a&V%y-k6FP7lZB17{peS z%)c>F?YgXu;x|B5$)(3)VIgX8$UBpuTls$mNP!@y(?YPARaqMYDx)j7Rv4p0tSJ~Erlc;CSvmwv#UNH=s#uC_sg8pcrA$?n zLT%S^1Un@WEZ8w!S?Os&ptkJTAvUtYt{sz=$}3s7;{YZ5MR@ux8oCpw$aWo>m=ARV zu}Nqq*#aJjk`X+(WjlEA$foe%QCh=;S2BplLBe4vIAybV@Kl0y$=RZv8&5D~`*`pu zOyt2QSjmG+Fq8+EU@H$UrMWzK1&eubiAM9_gl!7JZXP_M={z`_6{oD=6%FXYDcaCu zs&L#2PGXua@;}a)9)ka4PmgKJBUbQZ3CO3P0zp(3G3#-W$h-{F*+;|F|5(@qO{kPz zAp*@l1ED^45sSp57RU&;L$JZUfbO9(zN4 zOp;F(zz3znAs}As5h@Tj$cR%ruXUOO?!)_%9OpH(RV}kV1Y@Kum5;7hLdyfappUlH*qha3bmOS4xB{CciQul8eL~ z;8g*UX3dmbxaDLcY#4o|Q!Ob9E>4bun@ph;avurT^YRGAZ0A2C>Bz{0V3u` ze(eD~@|zE0x-uz;GC2oalHx50F>Wy~SNS#vconZkNYSJoTP$@!E8do)1CY$T98YXP zF#V_T%HYija4FuDAU?&*65t>Iz66kzuS|fKmMG~$hR{3UP*xNYyhS0#6ZP74uK!2UsKuuQQGc+CQ6iZ?B=Pd>O7;=|ds=(36_;gnjU z;ZiH{#*1(c4X%zp_{hNkIPez?04;^Xj=^?(SP3&iygyG4rZMjBgBdwEpxN*twd5j0 zCLC3Zgd#z~@qsna$eA@@dTdP{I4&Gs!!Mj)!!JI<1|*zfgG)Hb1{XQY1}q$BgUjwj z8~b4=6C>eV8=UlL8yxs_8xTF<#(sRp4HzGD10pBgfbn5Brqc6naM6=8IMKC%haBRy z3uk2Q^Z*YIf$;HNJ2|{-7tY4og@e0xdTd8?!41;d(+}ss@FFp|ZA+lI9?5}^cfcc_ z>MEQDm@#6)Z}O$5-85bgnZy^3?%$NPwN!{ 
zc4uR9{BdVvSd8ST98LuVwW)=gh?Q8<&xBt=?XLp*r}jy-sn1~A!T zAejhje{kdd3;~H8lW2Axi9-^kU=x<%QW$lJ^`ky97wS_cRPO&^mIS|;T=Yx1iv+PQ zas|a&$rQ0vnIdK@Q|yF=E9C4n@Dm@V;$ITDY^0`Vww#+we1AKh#Phj2iN zUDCE7*}wt{Ni2GxaM4M0O7w_g5dFCML^qX{=p#Id9+7oHQ4}BhfPyJetPu4n)RPjV zh7iJKRiy;Ny_6aC2?i51!gx)lRHd*eMHY;do7m?==_{}0l* z3X*7|v?WfUv~lsM*Isy(W?u%S{WrsL{XnSHxZYq-9mj!`r_jr3WLb>%@{;NH{K<6t zy2$kZ(NyF64K12v{NHvVI$>WZA=#HlNcJof(*LFO!S23AtqY{#87z$9sW00fcZdW* zD1t`f%)~T9tzD9NwiF!?gu?g81_U3>`QRlE8baqgBvSuyFHAg%iJh1M z&9D=!F$o3FDi?Qk40RIUcvg~nTL&NE_7un8+5Z1)O?M9HRx%NfLawk(5waUz4-6K9lU z5svWegv>~AI&%pz=jgUA?(7<}VTy&k`wxf|Z6PoHgO3zV-~9(4v7NM#7tX;)j3zDQ zy>swk``uKFmDI{YzFPr4Y=I1j)XGAPZ^1`6w-Do7@X_*!@h$jhdBo-xe6&1b{|!D` zo|TwvTCB7@@{xvA*mxubkj}}V|x4}mQM!zZmJ|Zwn1}!!tOE9ELB7(qu7p8K= zVu#u`XbY zTLoj>EEr>5#~AAa#`skYjIl;wY!xup3GBy8ficz!z!t0z7z^cFgz_yy`4*vki%`Br zDBmKKZxPD3WRo={kqfOPUQ8;ycvY$N5)(`Xfvhu?e!SpR`0>h9=_Tfw3Ibh=DhOmz zs_@(CFJ;kQe+Alj{iWn_`b&xS`b%l!^_NoZ^p}#x>o283YsmVmBnkSfAc^{`AQcPx zOG)Y0?FyNf(qRT~L~IL21$jrAZf* zCS6e4bU|r}$vMtfP+DSbj{SntrVC0-1~H(MQ4C|Dd@_zZGB7VNJQT8fkw&{l#9HagZ+YXk~vk_5Gt zNifr(>|q^AO{a6ZbTUCK%PND-X>(&)2wcb%BHYLnLT_ZUCo-GxJxXTPBdS*!ceoX) zg;~P<%>2gu30H{T#U5aJ&X)`3(&38K@8ANde>J|ENX-<@kmf$k&ssldDsmQ>)W_r+%j+&f(5g&W}1Da{kdJ z)FsX($>mO$jV=dWwXR97xvrJ2x45oz{n0Jat;(&|ZN1w=Zcn*=?sn0gg_~?iXP$p~Uh+!zn&Wl7*CSq^dwu8imv_8(wRgSuWbZlN zTYR$MRv4?#+dd!YUG$auTlEk6I{V(_`<(Cle#L&v{eJgv_5aBKctCMLSwLODs({r2 zzXn_gWC9}sZwdSn?m2nPa3yGZP-~DaXdq};uupJt@Y>*w!9N)Dj3ve<<7VTFM&9IS zGMOSx38re(M$<{SF10-5_K;&CKZcBix`oz-c81;@`c9Zbm?11GEGcYq*gavN!d)Ex z;c4MjaR0`v@CD(U;l_Ano5p_q@gK+1?`%xc7or*dW^+VJJa@$2}bP?QhQ6Jq5 z*Qq`b{Y3OL(H};C9Q`@mZgC;{AGp^dKc*z6CZ<1TL(I!Ddt#2nd>r#t%vfv?++Hy) zc7E)l*yXV|#6BAPX6)Com*RZm!s24$O5@t%dgBJ-?ugqI_h8&(aeLxEkGl}h#(Tua z#An9m#ka)w#Sg?UieDCgL;QX5563?pzcqeG{Lc8J@!!Q?PVj;oD$EIG36m0LCv+wB zC)|{9Kip2SE#dWqLkS-ze3|eK+)jZK#u8l9{U z*5ucd4<~<;d@}ib^3TbCCjXPnn^k7D+1(syjx?LiIp#8Row><8RnLd>4!n*JFF+k7O%G_YbbK)H6wb#R z>-YjaGNORv$nhQ1dcFv?k9KL%JU$Z{_`CRf`3ydUpLCrO&C&k&@E3=XrvdqYS;{+_ zCg}K=mudJruH{2{4PPC|H=qdK2elcU_3tMKS5>x5uBv)#@&~37y)X8^9=v$uO_Z(Y zbHh+D8bSjXkqPC(AH#?7ef&aR1?A+KYA*%p`AvK=Kc7FzpF)NFd~_emN8DFPv)^KF8qv^Qdc(`VKw+3nW@H#*8s&KED|Kf__2^&@3l@0a}9m zg83AFE&TZcZbh}hXf6Etp%m0+a-KT!_Tq3S=aD@0JNkVjZzPWgJeD^Gm?szk9?3Jg z@%P<_y7_GMse$C3haLwxxg%$O&qpRcZM%lA-Bob-&p_mZoX?_S_;a@K5%(HVuNJMn zk+-1&{-A+>hwtGFfRmZXo%cWwO*EmpSsGL~^UDfe9mwl>zeK)@pTwJfMB&#Moui%H z^fAHcVKn+zFh6u9SIfVL2Kat-1^xz5KYGu2DVHC-6076ect1XpUj~1^C=xfr=xRM5 z{c{YOG}DOcXKE1Jw_|QaU`9?e2*E$^?LU?o`T1JD`9YM;>(N>Rx(Agbf7FDMbH3s` z-!$>1FKc+_smZUsA9(7E1IQ7%o~TdQXhgHMsP0BS3;FRY4E!2?0e5n29C7P1>-2 z*u*osCs!T0@!;(a$hh{8cadY@pA!!99=99!A$QH)e4wt1PtiD|7X9Ulk*3jbRC_sm zWWs0#YQ7RP)#*}<*7>cy@4w%&*I7SmLV1@>+Udb+aOHz*q>RsB>ZC`B$mIlbdDpa# zFGq7bf!VPmH*1kYJ@4{4A8+6v9z~CR$5)Ox4(Ry4TcFFtVc|T@7oaZmEBYRCnT4J~ z^(ZugFW+ZE%e3g_sr=1o5`Ww-KF#tqf&3&C%X^`fs00Om1^QU6sJi}+NH;vt`Md|lk zL>d%pxP0T~YxTS?|Go3)PP~N-cbU+}`!)GG-k*1cOwFJ=Xo41PK?lz`@r8Vo<|Zv@ zThke6tOwBR8h${_5Ad&RoPAOE$fy=&{)$v60mU7+@Ttbj-l%6ZPls;e15gaV92!bp z#Jfx~u7)OW(>e#Kf7XxQi>fc*tDWkk7F;8%`RbdTob@oi1;Y3?4NXV5hc}=lPP}G~ zHJ#TQc#ZWm(yTGLJw3`S(WB>4GfF~tyoh!<@xk3Zo5d#@_!%IF6to^j9dDEhxx^u- zL_X|E6N15Z-5?AO`TSD`{tPtQ@EBaK2m{L?I%|TuK86l!_!=#r#W&-sW}#;6MUU}^ zHEwz|LkorJQuug2ns;wMAX-m~0@I<+YLUc{V5c?LA$LOKuA3|ybik0Izxec-zph@;0eP&9hw$~Fxe)S|x8XEf`zXyN5&G-z1s z{Nl(LpwVYhJ6bZL8Bs&Em-46iM_@$H zkwGm#=Q`e-_u|V9d=$?fN1k)hW+(o}S5Y$`gPt^isO~^-Yxo{5JO$PB%=yc6)aUgBg8cIGQ@ zhYAOv4g>UPFBE_(Z-?gRE08}tVjAJkAGxCnBOj*aRcnfJLJgrsUm+DJ9l!i!Rxs55 zK9mP>W+0c6Gko9{XyHGf{NTHP0+E$BA}u`1uY*UQBVRXcMD5y>_kHmOaxftGgOesC zSI^{KO`t((C<&VV}ZW!xu%fs;Gaz$$l&Xb*LXUwce6~WHvFW*bA 
z->ntkbGz;3l_sX7C+RN8p+N0rTqsCz8-r#p$-+l1V^j)>iBh{lXkG`S5ON*|* z{03SQj6PWCq~V9P{PfG;Yi`u?%SXT0@RI<87c_i2_6}>&2SNO5i0wRfBYHA;)JcnC zPyc{Sue`MS#s`gPrtXew`qy_Gc#p(l-o4Yb8d?Z?@W^AA_V()0cgTj8k7!49{9JSm zf1W?U&w)qkkxmm|gJOb5u0z#Y{(B=1>S<`Y$$6w^v=vnMB3h2-qT{B~%^yP3fAuMHH=%aubUC_7ef{-De(9B1E#D1O z83Rmd`eAAogccYt#o|Y|jz5l;149?#(fkrB)34{tU^Wx5Q;YN`K0(eVcpOyoak}P( z3nv-*8Dpc+*Arn@WaeWFP9uwP)SK_VoTuY&f_Ysmbm#PcK1N;#VRE<03A*=GP@3=1 zd8kkazYjX{488;E{T-UdccA?yRH5eccn*~6THv2Ueoip>T?<2R0H1GiK7Q!%r=Qdx zE-I?4vlbt&J8p8*Uk>BFN5iyk3!T6}OWFD-(n#zA;~w4xu;4}3j58xHV~^W}Ut zzXhaPK9b8XxUvl`7@38vTK=skpf{AE7Y(RV%@@W(vCGl&UqLwwQQ>dUIhOOwW1#3l z6CR{Tjap>*?st&0+lhSM)p*&8I!7aQDB#>rK=b)Dj}J0lPJ?lKo(|dgAY_D^#ql0_ zyic=nwHEochJMCJ8Sn!qk9V8_icpFbLI2<@o9}$luy04#>?wWos|HQS%$kcYfB4ne zK%~j}l=o(TPYI}w+PvJLRm(w#!y-%mUe zw$Zdscjkcu@1BI@VV^-q%Hmb2ykU(I@XUv$lpwW~8g>QOh$A;$( zZ@tvhKF8M9wAA!bn&$Vd$4;CH{5#`A-gCx`Rm(e#Zb3ZEiR#X0poTp!uiv=Eh$iT6 zSvGjWTPr0@>18%-o9_&tJ~jw=QUeX^X!gV z6FQ=vY}@+!>o2{ycklMj>C-#rO$L2w3wHK}LEv89*xVp~EpRk8FGxKIGl8j2XwIeG z8?+-8SGH>4xjj;TX{%<#mEF))_o8s$k$qb90-6j@)2Gl=@Dys`x0(1aNABZ?v}h=Z zH|R%RLr>}85vPuY*_oB+V)+)+r2>8uJdu4zhIJ?g<&E5O*{DHVw3k(*J4cRYE8v^zi#`( zfU=RpUnt5n=C@jZVAsw=hj-3uXqY*(zJBJeLq<0~aAecS6MEh)=i}dz_REj(Xw%3e z?bs7))Ol&!SgICg;1)gz7EsaWc?LzzG4VUb!Zhd>ExHR`)*y@4dGu{mIQGUm?Z|7R z-5L~gc^6+evhQZy*ul%g8e9YCOE3AN`LL>*uN~Vqwo;GWzs?6f9dhz`4-;QC;;-cw zjP2KtShO&INI{`!5EdYSdFU)N(HHyz(05b`lP)^>a)XK7*bwrEM?@)7eTyP9MymKt zT6l6dm+%g?ypscOMnCHKY80w}<>afM{edhFckrL`4O%xn-+~|D?eLs{2l&V>zI-!! z8|D`129z7TtF~_1%=(5sGY+Fhn2n*Y(Q}i}^I!4LX-01SNe?W!qA(caC-LquUtb8^ z07jEQvD{EN8Zh$j@k{lYlg|SGp?lAK^X;Ath!!@*k_F0(qR~b20CM0jYTP1&zk%%Y zkoFJIh`{8Z_<)M$g|quiE47( zWJOR>Q88yRV0P6tM^wxpm~+knOqg?yxaPcubrmC4&(whL_Mqc3UKM>G+AuSJ(z)>(-5la!%O5>|X2ns~3CgnhPt`GBiAn4_ zJ4$-vjZ-JDU2l0ZJfdZ*h{#i|u9G;iNiHX{yZIvPXWf;TDD}3T7kQKMy}7ome$a?{ zb0(U33)Qqm>!uRxo}efUM^~D)Y>2YFOL;@{Z;NBBxA_p7(*q>SQ0q-`iKJl}mOcqO z*`3splXAS=oiAirwsL$w3E|cF;s7%nC9$#>S>Zoew4MaCa9UgU%rFnjU#&_`4jJub zi<-+O@vX8!KZg;lF@l!1T5RPAvt^sa7Z2pVG*A2JH3#L-w5s1=z1b9&Cp(gQHi18( zq2HWz-T^UYHc0w(;Q2X4XlzR2k~i-GQ(doN^*vdlRiw4O9`BPQz1e^0uT7g~B&{;BnyPsN+s*5&=Y1V@qer;H#k2Yv<=-tM#FV@dtPf4bK9cQsnxK;{!+D?lU6LlR zH+wQ&k@AKTb1uze?E}3uA8H@yrR@XVg>5s&KOyzheB{Z4`$zmqWH-nA2br}YM&9YD zj!947v8rZXT&gpuQ&&@P2SyV>&kF|-$iy;?e@x$)b@1*{nlzc}vd=uNZlngO=S#BH za!h1p^FHxt`R}PJn=S7su}wF%tYWiOY%;6Msww&^ug!eIKImAbJa1c^ zB*)pl=&Z$ATwX8zR0jiU;1qsxlw z6C$xn?5xOVOB&lE+jo&IljLwYjpkDtOOVs3cA>U3k*$?%ODzz4spKIt=}O6xt-5WJ zI8~m+6Ep)3F_n>Rkp093tb*F+DPNZjY^{;6vDFo)vPfFCT9D`+_0+RBHLmR-SIY+# zM%0MJ3@;L?x>Iv{DN=WsRB>jL+NJennmm0s)666Cs_9sm^lsLrr@8vimugp>WoF?L zYfPec2~uWyP}`T_jZNA?I+o3TBe`b=YeFbX1>S0}c^;`=2S1UnUB!zHCSh68yms=g zycThB-_Im>*6eJWrs6HT6HZfJ0oJCoVFb(lVN#^HXkILr_*Df~>>DX`(af_H_o2Ip zM4K&PP9&KKe-W-#oDuP(zHz8R@>VhKScUtNrZ9{Js5N;B8`K*1-a6FQRCgvTb>S*{ zMb)(51|$v8N4D8=<>nvPSmF0(Hh?w#LK7~-OYX!Mu?$;HTTMa(Gi;q~od~JeE7#0C zl{$YX$qY4`#syUO;)Op2v63b}&LHj5(Q4k1G})K@Ptwp6_y+m}vjp}hd&v~K!K|sK zwjD62wCxpE?)c&bb6`S^7o-q5)u=&Ysl$sJ%*kAVwbPy0zyIWk{hi}lbymXulctqy zqmI=gWq@@i)KSvXv}M)Q@2G%Utn$68+}D&>l!VGUDq<@}R+VJUnp_peT}a87FVSKp zci>}nteEWtFQ(G62<}e$a6n*WpAMZ(!=(cxs+P`SX;uVdYq z2eHvP^igOx8dy5&)H#GAcvAOvH)}6g&RaZntQl{uB3WE|Pvg~)^CuyuLCiq`AI*v? 
zpEjCS(wh9Fp2k^wYLC&J^k7nXYxFYzP9o??VpBTaSs7^KcoTM(T_xbO9$T=;MD4{( zjpjvtc{4?N(SD}2)Q72FvyL=9Enc&YmcSI1#UU?HWCx|Z&XyIls9C%%?L6ttpMgks%S52 zAqQT80O6jziH`70m;21=sCm9LYr66wynDm57R*b(fA9R1mFAhOu9!4#c+yCHlfgS* znJu&E`OqX4D-_Kg?(p(@zM2r%KKwC%LMTfgwuZF1$(Fj-?mAY;HkH4U*elx*on;b> zwjE$+b__ESw z5?FJ2*a^}EXJvzxMnCV&&FDr4ss6)>mUAfGBXPQ~`%!^TLxT+8%33tG7((g<11A4xNloxm2 zggcxgaI?FaSdDW+M0H-+i#KMqX?59d(pY=x_zKcT<5}9 z53q^0abw05VoRl^%h>_B(!#}s$m-DXJk7uQUhD|(Pa5cXewO#6_daN{Tv?!_#xF)6 z`k`W8jCfN6VuuP1R;ZG1(t55YChTSjmM7~7uBfKsuC=dyeE-KKrehvY8kZ&rkWlB| z6UZjv#Z&csG7lm(y#;rFRgUQfn#Zbm)iE62oVZYXTI;>pB7y=VXe#tdBW+Rg9$&4o zvecEVmi_VDvy04!^v)$}O{S5M!K%ZRMKwSzTUB>%FwH4eK~Dm%mRN5uv#lZPVv0Jm|2fJM}^LI`U&m6vb zHTH0wI?dZPYIL^ULo?Sp(TeT*k{SQHcal7+S@P%rmZjJeKr(OO{$=$zjnt+gfYunQF@zMc@u67^*D2V>lQ-;W#MM6yiU&6u?AAiymQ^2>Yp9ob>=|d^|h9ndExaU_u3eGn>l(BhOK=|+F>Xw zQk9$LnRp%P+>|zZ8tCiC_iovJ`@oxH%}g^?WaazcAd8O|&Bvlucw1rw&aYV6BobkG zvNv^VUru~~w46x;=JWy;$s%qff(~Shykrw!$JW})kvzDLueF(I9g+Azy3Nfrh46zG zat0f%A`4H+vNRZyy;v5FjcB%!J`HH0%Q9)%dV?4%lYMH1k&v}us_H%x+-$5lp(AYV z8e((x*;qZn5*2CbXmH3(5F^WIPsn;4zp1yCCvjxIt)s0AOC@C8hux)~4PtvP5NyBR z@{t6}`J`BuX5BN)r0SLGS-Bj?o>tYN)pn>Sj{sgh@3T;jz((LCtG|KPWIPd>#4 zTOrnqYo-qB_mhoxZ^Le{V_7?I-8z!cavoc& zW-imX!z1perwAyugs!6lU(s>oli1Nz1 z+(05&Y^2EJ6=Zxo%hpw89^%dO2mfa7UYCu#cg|1WZpsQ1NqmgZRjl`r>?>97J$m=< zULda$GO|Vgp61?%MP5Y8<^pl&u`|XHM9Zs;sZ*DiBiWeDL9JrA-qelPxge49P?*Nd zY%^<3t#$eYt2@iY2Gfc*tN97$>qTfEOX7ve6tmDoP+Nw?3@4iMDrQc+%lm#1^Brx{ zwq&m@C3Ph8C9;M_k_}aIUqv?DB-u`qY}i{xiZn@*b0k|l$%gcD!lSpu2)Mgcf9B9m zUF)|ag;uBMQji~~)^?Xj96q%FkHfqaE2ihy@iWMK;hF&DK$%tpt}S(S+2a z{v@vVr#Z<@3z?3LZ*BAhXb>jAD1Fpee`0WC8GV`HYj??Ba%C?qIZyv$Iz%1wH%4XQ?uW*9Hd-LUt-^_?V^nDkn!m4DP((^PB*84t?S4DhQE$+gi0dre=G zv?ys&p}+goJ;2raF8z`wk>+1tHE;2Txy$tzwp&Kp7fd2-9wT|YH|zhBxw0THg7Lc&=ibQo(i~~Q7n!ub?7v!F`nuhzJM>5P zBzEuGyIb67^QBtiyB${^Jofq=bcH*&Z#QAgAQN%)13F%3Au)J0YoRA5;>FaA(npOI zCpKtOR$sQkrFRp}+7Hw+p@hukkQv3C&K4UhPiWDqu0E*6$+sHT&&ZxKgSy!T^*OWV ziTZ;2yjj~vO}NM%#f-9_>_q`QvcQNE`CLsVl>MSsekxzradF-ke^yyf>Wo7U3D>2) zA90Vm1BG#rBO;O{_9C@hI-{7Z23)c+w2Y63^^QnnGE^ZQJG&_o!lK z&ir6$R)PAbhGm1NoStA5Il*Wq1COT~+l&S$YW^fiPJcvwHBtUZ3#G(sjNrve@)$h{ zHx)=OoJf{7h4!K5D@M{r0I~aE>Dt7IQ~rAH5K@U4xKDN!=J5E+u}cfhnnRWgR`c(JF|| zHJ-AZW({<)0mCGQNp*kXv$ptC*Lf>S;Sak(W;3 z?)BT3sBLRW-!h7qr&>kRhcqcZijShn=E{bU&neJz zg(rkv*R55Ct4PJuSf0w$bwTkD2xzK&N9yv*ymEZqAoEP_repr3Hxy$d$UH}s z3}m%AAxnHD?QI~+YMV6^)r5r5*tRq3-U_4E>i%Yux^E&IK=@k=HFuT5ly4cK`~3JB zL7e^p&%C_@YAIzsu0@?v*6EU(OdhoeJ?})T;(be)E~4GV+}y(#u3tYAQ>s*abcC6k zoM>6rP^&j$jW|&f=jxMT&Yg^njTli@-~8BiNT;znS}*DN!Mw&a2B{MyR+OiRgpX6> zZWC{>KX*OHi+NRg%4?fw{6E&QzRXpXoz){OvO{k&LU@wSv70Pw&3QX(Cw^Dr_iTM> zx~7vLJ+=^UYw4x3Eo3cp)FeGg^chY%P+pc3$Ir)QoclqHK=q@46j%}1=# z@epP0YA4aC8YyaToH%*yYTSwN@VK}}ktbT+FtZ2pz*yt624tu))M~(KDd<3$bg=}r zmKo%AA_>{mZR-dib(*cb6=8bbJXtYTd9!4ZM@odlyywIDN0HT&=zXUv5Pdm5T_ntc ztnKebO?#uL$$j$V>sSBBrAqk+lrH_mKiB-msdd+V$B$=bQs~;cb=NMf&4(0x*Nw-D z%wJJzV-=(BC%Kx)M@syYt%f*R#Xj1qiv*)_C0kQNt$nW6s#vl?Dp!^&TSi`|z}af0 zvbtw#YJrEV>?5LM!Ru3T?us=>MUqac&c1-zmoi$#>6OLoyQQ>JtRz<__CH-sT6jG*Ya_9Z@sa~Bf=*uZ3A%<|x-~G76f%w#mhSB9O1nSg&n#EReJpcW zu*CYunZU>(+AwbpuPD!)A@N4Gz9M%dDkq9FNJto`qF${gDqBE4$koLZO3F2q z&eo`ylR>KBV@xpWG@h0LBuHP&3l~{+iCwdViwp7wuwEo+ESLD}ymZCn^U{(o^0kBw zYWZ#*>nabiRUj?DJ(=epPc6W*3M*Jv_L%1fOYvN=4pkn$16>VGRk!b5%9T~V(j#RywfVqfSA#pvo6 znqbyQAEQKO7YB7kKAt70HG|bU4NrK@Jk_k6+LGwR1S5S%x{<{i&FrQpi&Z!8)Uid& z10BzrwRs8bF*6gKwaA!$`ow+x{nK%k(+L5Un0b78VpXAf9$KD_Q5#eNEY%>NmuL(O zS8JNbW_38vsu7QDKzq>?n~6*#iT%uoGp9me{Jh2YGzBDSDo7NSWi=#z>{jUB}-e9z>FX+ya@;U4c zL3;8sLcf2@rg8Pph9WK5ohGR@y=nUZtqMaeOLV1Gw*7pA$VhnlKnmP!p2HfFRY*mb 
zf1|gsG$tTxD?iA#@=C;2DzW|SAm6XjDC6`2jnlLr_z$C++LnsEA=TVQH6KQA!g9q_6`|H6_;7kvmT1%*rmYw4 zG}c=(ZHv53`u$15`%aRAdYcuo_0MZ8lfGhoE7LYBgKf2Cj=Y*gv9E+3JfEvth$7a@?9NnD!*?pc*EzC!TUwc?=7)jbZoXE!2CH zsrMwlLau8!rD<(fL{03W4(H7p%P#60wwCRoeVcoT9b93~wuHRKD$Q*1#D)t;a%nFc zE9}qx%(jM8bpdDE-_{%;{zF!RUFt>}!uAPxr8P>+zS4-yBKsK&Ak0C&BC?{?@$zvW zWAhHgoep(8OGEv40+%ak;>idtlC?l%86tPkb=ceY%(*?=4jt*SwOMTUe({7Ncaej2 zrw{D9a&3RF7IB?>MmFxW>m;Eosq41PU9aD}dDwtH{rh#WuULO>p{|L|AID==)^W1m z&p55e&1pYYa}{fqzw>9D8mX#n1Fx#muK#UN;;;eT$C>X&iq}`~yKvd-N#kQ0GJx}9 zo+PJlBQe57j#4bIb!}0i<{|AL=5A8hZ&YhC?2gbR5FBm2&YI`Vny31F;V)9mYIZP*q-nl`H3=+h$u;l%2jzeYipjE z-&(_eWAQT8R3x}gBe$kaoidM;TN}upGE62bBcC}y5$~!hed^9jD}tB@D=m^)hV0Fr z3U84#acq}bbM)xNOKp#67}RWo=F}tF9z`Q%FdI^mq|yFnV!h5f4FUE|@_h;E&dqN_ z8v{&|rgF;;?S`ADB-8%sq|>in>d$M~F?E5esWEYX)riv!)l6 zN||N(bRJ=uBgu97Vx8tbgPDZ(_@p6&665>4)o`=aU}E$5Abn8mkvpVhDzJimt&OI& zmKt*;37d1(8k?k>Id^CB0;=L29q*;FsN||T`7pa>JIrn>LhMF)ye^NJV2=Dw)L3uG zUOL-*N>C6rGvo*IeI0>r{mC@5hQ5N9mkE}}A}0m2nnqr%f^1(u?m%PSG<$ zs6Md~X2xlvr!i^~Ulrk6YMQ3Ij9M?Q`cR)4FYaHMx`L48gH#iXtIBqmQ$>hNq%&7t z%F6X~Ht4BkUC5;3B0W5K;65?#tF$a_J5w;hHOCq?Dli0IC9{osTC8e=R~ z&@8>BmNS#g#Cuf~xvz@#QeG?HQd(!b&ML^)38ueE%bUE8$Jy2?+^m5_+N{z)8fD*E zt+w1;g~w5AWmwwkXn#VKEs7LxU$r(%tvzt?)agzKHS$eG-PV|8$y4)NT-mB@RbER&yEMyMoTwLvN*cmt_-ZReK0?ymyw&D#aB$mppjqm~X>a!5%6hcj z`aws){|iObakX70BV$)8s!g6ZZ==$hNoS%|4N2o5B+*eE0YGUAJ1A_WupJO)5Zy_1 zCed9)li`4EMCZU2Wr(h#v^UYUaKvt+8|l8XL^nY~If`deoSw%X2*NHnA%oJH(4sui z)0AI<=oz}#hv;RfP?2ahJX#N>?WOnaZm|l+p9ED6cBT z^Qk_*fP?Zjhln~tq$5Va5el;@9HWp$*N)P)!Z5;*Xn9Ikhv49YLa0gCk5gWC6h=nw3w>uG(O#6ULo|WzKSQ(+rOy)W zM{QM?=s?OpM|2RSseK)W(e*i$w}irSO8go79m3fuuIJ%GO>o}g;D1zcQDL#$pH3*JL)P6T8J&XDylIT*(Z$xw% z-P@SxT569dqSW7@K5(S*fLrvOGCrtp9Cy<7CPb-k5KZ(Tl@~+wC_TS`=oz}+l;~Be zQ!}DB=>6^xy+!TQoSwN)A%((23U}$+1G;vP`XrVpqjU@UE(?{>impEboE#_~M^r2%?TAwU;5pGclzu@pi0a;+ zXfWmfNi>AoZyr&lk5egcBZW1TZiE~xr1*S5q&`O?1d;lj-U#SQp$CO-lvK*^Nwf}) zSyvPmThV_NTeJMSX zf{}um${0y`CZfMnx(J006gE*sVxMh-G)+7-fIipGm*GPccPo97E38_1%Rg|_DXlS&9K$Cn3gmDUo6w)B9QP`b> zGaTIEV1h#hIMjec9XLe7p(Px;z@a}JKErW39OuJvH5_-t@i-i>!SM+kbKn#Jr@C+o zgHsHg+Q6wRoMyo(9Zsv@v=dJI;B*X5=iqc5PWRyS98Q13=_^DvL=T8Yh~*(xQ^uY+ z2a+?SR*>35N`TZ4(lAJ4AWefb57J^tt08TMvsIEbE2daBey@u+OGRg`SMjtWp!g&gu7r=QXoVUVxKb%j&`5c^Y z!ucVbU&8qdoNdsG(CVRW0&NRuyFxn<+A+{hgqE1sB52n^y93&T(4L0&8npMIeF5z& zXmj8)2ri@GG7&B_;4%*`$Ki4vE_dPb1TL@PVnb0U6xE=pJBoUt=nfR!hoZ+&^c;#_ zL(w}Z`VvLop{NC}B3zx}s)K8BxR!)#8Ms!2YYn*8gKIcko5Gc*cxSlwgX<%>K85RF zaQy_=?{F&wH&?jn;Z_E2mEl$sZb5L1f?F=!9pPRS?nby*gL{3r$H2V{+y}ya9NZ_u zeFofD!hHkW55q%(hZY{5@Td=uV0bi!M=U(r!J`{I`om)wJjTLfDm+r*u@)ZN;E@TB zBk(v2k8AL_3y&x8cnyya@Q|SspeqcW8+4(D)h?r-QAJe}a_2G3&fECbJ~@brUcAUs3i83WHacy@&6RCvyTXF5Dr!gCEgx4|f7lo*K;?@;0syz0O!6kg5YH3D8!;58p!De$@i zy*Ko}&<8;u27Pnr+d$s~`hn1ofqn+`m!Q81{U6XjgIRS(U{t~A z4x<^yN-+Av7z$%^7(2k20OM|$ykS}l(|eeMU=D-1In3E8IT0o2pkx|Ku0_dRD0vtq zFQ8Oql&XVL+feEbO1qVV>{NAIwKdR3| z^%PWJhUzO(eGRJbK=u2mA)!V`)O12k4Qdug&1lrThFa}Ws~c+dM6Cg+H3GG+qSg)6 zx`*1OP`d(Z_eJd?s67g`$D?*KYA-_VwWz%pwU49rWz?qI82p{!?*e}V{C(jc0{^D) z?+5=8@Lvf3UGP5)|MT#F2Y(p>P6%*CfF1$m5a5S^dI&g)fHMfVf`C5|@E8Ga5%38C zOcDD6y%1O$ft3(g6M>-!Y=OXb2po#Q(Fk0Kz-a0ecU8r*ub*`Y!L)7hrx}Q*2MvxPNiXzB>pb7|Th@fZ$ z?MKit1YJhZJp{c*&^Od`LcKDm=ZAU`sMj9#5>RhA>Pk{X!D#p!8csmN8EBY_ zhAYr;GaBwga2W(AAb1#pMbV{u{!_B76$MXCXWV5$=d6fry%j2tq_S zB3dA#3nF?W;x|N0LBv`_Y(vCOL>xdw79vg|;u0dV5%CZaFA?!KBEBI~KxBJFc0yz? 
zL=Hscctj>4at`QK%>cMl!ivD(P%pw9Yv#yXmksW zUZBxuG#1d<6^+NC@fkH+uO*oG(vL}?M_g{bm~s)4A6h>Av3TSWCh)I>z3AZi_= zb|ESYQRfhK3sIjCm4_xJ(8L=}{LrKxnsh{yzG!k0O|GKJ9W;4{CU4Q?E1Kjf;(l~t zM7tuo1ft6$x;CQ25#0jOT@XDG(c=(31F;R$VftYrP>5iCwh#7^LBs8sxrn}HggJ$k%HXF^(pm`{oN1*vq zG+&A4xoDn`*a*a~KMw^>x z^B8S%(bf%ZYoKjYv|WI@wE^ig7{X5?~3?=h#!OanTWrM zcA;puAMJ~veH*mzjP|$C{uw%qM~7MHkdKZcI=Z2w867L5V<0+4qT>N{DuGU+=rk6c zrlHe4bk?GCBsz~r=RN3r5M7+mr7OCuN0-0QRfDds=-L)tJE3b5y6!^PFX&bV-F(ok z8oC9cTVr&KL$@yImWXbz&^;90kDx~z^w@-+Wzn+|ditSf3-oM@o}JJ$0X?6gS5@@d ziQX#o)}pr#z5UUl~6#7j;zZvM4f_}@Yc9 z6NXj6usRqPfnl*2)&ax%VAyO7+m7L07;ePyQW#zt!)s%BLkxd~-(v9F0{mvfh;kS) z3nPwWq&r3~z^FnP)efUZW7IAD-X6cFV6+=X&&Fsg#{7;k7csUG#^z#Ne~f#M@zpVY zE5^&1&;S!oU}9fP{2P<{VbVTKx`|1DVA2CjdWuOeG3hNPeZ-_(OtNBf8%*wq$=xxz zHzucG@;*$?!sO$ad&BfF_ zOcOE9h-o!3EeO-XFfAI>jv}cEl3F3DBa(U{X&{ouB54|uQjxS4Nqdo$g``tRx{Rbp zNP2}NhUq0SeE_D9#Pms+J|ELpVftQ7KZ5BuG5s#4KgaYpm@Z?67iQGJj1bI-#*8kQ z(F-$DF=H8KY{!g!3Z61E3^QXevo&V+z|5hTIT|x3W9CfET!5KNG4latzQD|PnE3@W ztw?r2a$zJlM{-*vcR}(fBu_?iDw5YC`6QAVW_e*&XUtlUS&uNg3}$Cw_Gip#g*l5c zCku1lVs3HFy^MLCFt0b}UB$fbm|q6-hhqLR%)gHLcQOAd=D)`LkC@LeKOYMSBq@Rg z?pWZ31<N91F%_!89zGjRh%KuoMf{VZk;m$i#x9Sa22#u42I-SnwDNULmCpQbLf@ z1Su_%(jF<@kkS_^gOTz(QYIp022vIvWeHN&AY}_u_8{c|Qq@RxMQRD8mO`ozQmY{~ z5UC+ZjY4WGq;^DVPoxe&>TgILhtz3Ey^quvNPUmgZ%EC@LJr+7?uiH>WQV5u(T?c`eSKZEFFrav#?BqWlOPaC6;Z*vO`#Q8q2O? z*&kT;6wAwDc@-?Li{(wQybYEoVtFc-@56GsWWx$QR#d=>nphEn6@##1ELP0Gimh02 z1}k1+g%vARSZT(}Dp)xVD|4}`7FGpfRTx%9VpVgjYKv8Uuxcb$O~9(@ShWnR)?n3P zth$9&e`3`Ktm0Vhjn$!8JqoMGVD(h2o`cm(uzC$v@5SmYtUd*Tq}Qmi#tm!AU`=JL ziNcx=SThuBwqwm*to6d$wphCnYhPepRjeD0bvLoT4%R1Q{bg)0VM81??7&6`Y;1>( z>DWl|Y;3%bjW4nBGd8KQ$rGE(VpBzIs)<% zuyZhW&cv>=*i{3&La?h9b`8g_J=k>;?7~!rlhh z+X#ExV{dotO~>B7$Z$i37cwd#BM=$k$VfoOWMoW3#!_UgMaBkX>_WytWSm6CMP$50 z21AAwnGVP-jLag)G$OMcGOHmo0GSPu*$SCGkvRaFOOUx7nH!L~1DVH>c>$R>k@*0b zkCFKbnK{_k9{W~f-#zSmh<#77?<4jbvELi}%VU2P?4OGLv#@_Z_8-Ci2RNX`0XH0| zjRS!=5Q78laUdB7=HWmJ4ytfajf2f_a2*cbz`>_D_yGs=aHuE_HO8S;IP?Ko6_8a8 zS+kHe7g;-ym5HpQ$U2LxKXJGW4)?<0zBoJ)hiBpNIvhTUBNC33$C2JRvJ^+w;K)uK z$-)taqn&Z|cO2c0qggoSj$<)6mWE?*alAE-U&M(LIAOqv6r5O%6S+7!A14>#0M`w;-V@h1;`#yHaK{ZjZgj+r zmAG*PHy+}qCvKL&%`&)I2{&uvW-M+l#?5EARRXsr;MQ5(x`10ZaqAtjMPxf8TZio8 z$S#TOGRUrk?CQvFitIMXZinn{$nJ;iVaT3~>`lnNkK1P49*jSV;E$QOQxA6<;!X_i zw8ouy-06xt8*%3)?v}>g*|@tGcOTMCBV@wyvc-^T0rcryTRUgNL&`0D}w`ii%DyzPUx z=kYi2cL4s5#ovAK_i4NW zGZ;COkdus@WyslxoIS|7ft=^a$wAInd~m`C9X^!9hsOBO5g&Tt!*G0VZXq#e%g1>j^dhYy{W@u$f>>z}A6n z2ip&J73?n9Yp`#S0l6?_cgS9l%R+7rIUaHXL(h-&}uylu|7c2>|^n+ypEQ4Vg2Fq`-jDlqhEaPFB49hfFX25b7mOHRm zV0D4j57szXhr^l*>t0xIfeYYez{A1&f-eL=1pWjz0k(Rug~HYtw!yHCf^8CPn_=4v z+Y#8#!gd|D=ditlEf;wL^2#7D5P1!e*BE)tk=GV^U6D5oc`J~25P2N=j>s>8{4&U| zg#0k%#~?or`IC^JjQnimKS%ysd@qadRq;Il-@D>_AABE*?_=>j3E$`9`x<=ThVT3E z{W!kg!uN;x{tDke5>6^0Nk4a77@0Yz{RAhwg&I0>R;w;HVZH z!v)8og5zqz@tWY6Cpc9SoO%dOnSxWUAa)SMnSyv$5OW2oj39Lrq356~Rg-Zy9BZb1h35C}Sg|h{Vl@L zpqVFV(ge+JLGxA6+c?S19_2P&7vD^KaGfW( zE*4z339hFE*BgRcalx&W;N~m11qyBo;LxE&YVUJC9i!M&*9-a~L7 zAh?ed+$Ra{$%6Yr!F{FRo*}p&5!`PH?vDib*Mj>e!NX1PFbE#i1&<)Xqq*SGRqz-p zcuW;^MXgRB6h{%RM)L3S>p%G#8DtN6Q?)F}dZrxhDzXz+#5%OE@kjiYb#QItj|&^E z3rhdU-(he%I9&bXo^)Vh$pU_cvtSy5se}!6hzs?=)#lBArvS*N967(V_0oucDOGo0mUo zp(O6^`;jgLntkrRrW-ZK?+eG zG&uE_<7hX3Ctk*zchvLcJctgsykyVm>|Z~>n1aCC6|vb(>e8zvP~tZM%JeWRYToR=W@z^(yaV< z|G#TBk(Hn=+|LvTDN3XUc~dZmy{OTCrM>xmr9pqC+voWY`scEI)K-(m$FHm22C*^q z#jo@T+ZQTqo?M8$F3Robgg`W3E!SqtROAY(J{<#n!~Y_;QqA~Tvuz9=!O+G=kG%3eB~;+gWR4kV9RX5W^FY!{n<_$q!N9N6!9=f#l}#CbBfS-psd{m8=&VI z|Mkq9XZ$r^sibEHRh>ljuYwAu`8=b2npacjnG8}uY2~Xsu`Wto^0pZyu@l{&r!q({ 
z+9+3c8zfCFrBcNV(!*GaIBRsr&0N#SAu&SO?vAr{Elh@+dD(ZEY`A6+~=6 zxo89NE9DFATllBxjwCi53sA|e9{9lYo#Sx~`7yr=#rlcKiS!qxOk;B9Z zAz^gTFytmvO;C1p@w#brI>(!J*0avE1>Kv~r47eLZ8`tbU(ZT-TFgmOG|l*U^cfCjYVi1r9t_+E8X;Mb$ru+``^m)s5Qf zM%xB!OBx*2jd>lck+qBpH8?$_hOTbRZ)0%!lN!3Zk)Jd;O^H=57Pj{AQE1uxD>Tse z7{rQy6%fysYTG$`|J!qwWIKCb|A(Y~_x-Q5N*N^or0iAtnaqphc22*?`j{r3&h{;D zl>E(pmg4FEL#NKNxLZb%Z^~g*79D3gY<*$5t;-w5##u&5>?5nIBS*14`7qv-^6ZpO zzt?51`N!C>pHEtk(TOYVI<+>h4zqYtIrhqYHm*~r+vnRla#3TSZxrcWFyHKVGDo^) zlf5DCs@JKP8C3qejC`4G)o4obAvCh6f&WvP-wG}$6K9=TZepJ%RA{n&sIOB~*U?O} zKXF@4q{LWVV51K8YSXImsV*~LIzSFbOv=z0mY6h1AJKZ-*pz#&@;`m z&9W!I@o!7GOD#mKyG?0@~N+|wYPQkHqeD=KR$aq^KW(wTnR_<2T2%V+que_0JVi-v?wak5HIjivwK zW>rZtr98P0&-j;xS$t^T=`I)e=l#bqv`v@%zh}yS@Mr(F*fEAedL12lByZ6pMdc8r z5UGo9+crA-XxodIXe0Q=wnv+oZ4w`;yI7ElZc=ddl3Bh+$BqBp0JC`iYxa7~_VQmA zDktztI^{@~GFX2OUK+Fpk>u)sHSR6@XzkLmx)W# z^NapVb{JO?*Kfyn{M%A@(tQ5u{qZbqEB|+uVQctT-P7qC4wBUzG=$EnN*#lYd#X+|M9>*8OkZ6Ukfje|54b$MY}$vQUek zK~+`v-_(qikp8>ZIA)_$w*RhM>XTQcO9yE0`#)ZlBHC`dlzsOhIi9nxe+?AYrLeHP zMgK51dFB+AZ?iTvX$e)NYcu)&!){p`NJqm8sNCNmo>XKxS7zx#!n2*ouM1fDkCb1* zf>v;8ihcRA$1Rum5R&r>@)K;s2N~<8s#+4 zQE@3wPhzm(+Nn^oVU7qdlOO^GLx3&NG+Uk${e+kas zY6Bg!tU%@5W|?D%&>@qx*A@DJ-TN;RFJ-#QcN5h_k2u|*QlLqEQtE6wf<_`V|v=3zQQ2YZbK2KH;ALC z5B=5U4PyVkl#^o+`%-7_XC=C-3;weRtGkcUp20R-9q1x^Z<#Sv_vYE+O?S<7CX$X9 zj+4my?n;rIG>|_bI zkYBc@EqQsLYUZ-cq?}fv%p`GK*G^N~>$QGr`eQ}ylf+LgZ&Bt2Wpt;h#2(mr0j0Ln zgd0QI+}*A~QO?#zh9znSZX&H#iOcp#G-?#5FXBej{Rs;ddNr1EETPsDuk3eNhbma*3TvbP>`k#i?@}(w=G1^KHr-?BtoQAid2yq&zK2`U$k^b1ig7uJMT4gn~Bw3q>{v@oweGM(h zlWP=A;er&qZlAtC6aPG_um%EAeq0z^0NAj4`|qgfpJ%+0m9zXsppiY^(PAlWj8kjl zekh@FzjRNEDc%lt8tEFjzoCl7sp}SK=dPBSp7INoJm2z0H_bvQb+=EW-_Awb&ndBm;b>!mQgY^9)Yp?qCynH0Vo`z@Oxj zx)0w>pU@v{R-v2CGI_x+tg|{3Rue!!R^mo}Pu=LpO6a#6@}0?1Lb8haSLVTQcwap^ zZCJt&5Kh^F5tYl6OQooHtkz3&GI^(5^go5X<&Zl7cO$#c{#LPRbmF%rTSu-7I+L3W z`hkPb|BJczfV1Ix|HqY`nRUZOn8m$w@31UZ@2tgQ^}bkT_1;-^wM6fW)uOi`T67{r zqKlS75TX-8f<%eu%sg{9zxUj`EAolY@0)x-`TT$Xmv!fyGc#vSea_P#f(POzL9`2= zn@O$&H#6l~uE=HKN*c@vKW96I%Uj%LEd<>$!q5-t32@N}Ehsqz7XQ{5P0H@)8jsP; zN48r4_=;|)(r@NeSmsi4@>FSpZ-4?$7)C$9Q+enSLFP^aBupdgYi^{_PUD7ZRp$k~ z2cyxB7V>UU6+FZRFs^PV+VMQvW7V+DdiTINTzCF~?}IAs&bxUh9KCdh@B1GRZ>Kx$ z)SY00{2dlTDngyZTpdDVWow4*c?lkuX88HvAjyI@pbx2-s*Nlc)LJRy z+h-eLXM;g@8LJ_vQGY3L3@5G7q!2hohjVWD7qo}18?8$@=$)5EdW?^FOKgzlD*Q}L zm*yD*FshTek^(2upIG761&IrqyRd@RC^pQ;HWG_9X@Jp5<#llNS%o*e<2B(Yf5W>7 z;1k7bUCGx&Fs%`QH5(Rrlfx9=uo!V;s?a)@k zY5QKrEc5#1eyTapG zh4at}m8VJX{vbbbB=G`qjQBly^zs$v&mVEu6`Q)a*!S-9}yYdfw^tz_xqFB`$Ax`{9 zgEmWk>|Z;BWT%-<-Z_r}9 zA4s%t6WqH-QT!ho8yG~&WOM}2;iUOfNCHlgz}45#m;vw^$XGF=U+>gRN23&&U@14g z60<@*LN*I&N?G-f0hR0sOaB^m365xuM4N8n{>txx5Ny|3sgV$pvm%xrLbz1s9g^*> zI6XYi9Knf~#os#KF8TqmBKH?Rc*M#NuUn}R7heC6Z>-GsmCmm~rR_(_29@q`L3Mqvi-riFjT3z+T_3zxHmJ z`Pklgi^Ylm=~FZ1UsCrLtv1sc@2Fz1(a68GqJ0^po^%2p%1>~Rks-B_Ip7lf0&vy~ zTV>JJmqVhBfZUyooW=yz$P4g~z&F+vUmCpn1zeL<1L2SU0WE>UX{xFr9N`edjaN;d zTLO~nG$1yd@~1Ho@J$&D@srQZv4-34c;=6zQ&bCjXv_NGg3Ws5Q>1qd2&e3c8Da!& zl)+=^#kF_^g5%}J>Ds5y(8QU;;POS8vU2OxEp9|j8jiLJ_>%5Tllk!}9KHEOdYh3u zTMM#6207>qt{6hBvE%|un`BAi`J)lxk~Mci{OnnXtq`Y7TCr&|>Y77{G$699);}^D zOJa2L9((0QDwY1Y^624zR$-gKEZGO`KUL9e^4TN!w>no&<_rIJwNHDs+Ak<3p{aSd zgwQ0}Mqd&o1dEV09wHGA@~JjbKMh#?B~6b0$zg=y5*X|^#4NHW^$kIbe8!u{`2mN`Xh0ZCa33!nkwWYLd$e-%6nkty; zI6odT2oK|I`kx-I_-b7Q1t5|MXLxAx8r26Kfgv0#TV-DfuLCXiOQJ!q0R0zfjd_~% ziH*gkrUG2?SJ?o|^Hny$pnjG0^QT|s_>#B=*lcM*dkl}tpA7}28N>73a2`}1E?_?! 
zs?VfoxVhVmBeOb9bJKJx3*p|jO}bUj_U%&&0Vwt&9!^)9$NNNiQ5Jqi_lquYaW|hn zw#r~nS?1N(1Xz?Vx#xfBGxhvj?mq=~WwYCO2oSdg>>{ zC4oDkng|c$0GT!lZigIH(>ucl8oMSOWI7P{VI(}gI=H46-pGDRymX2(x+&A8hJ_Km zWO8zW|57_+|2i6IWJr$w@3g|0L*-OX0z@|M1BACvK|zg40f?LHdm^R`2>$ z=L18qAe-=9_Y4z~#A#T3)p*M$y<6{Q{Sic>W+IhmAKEQWr@iYzYS%&B+!=^l z^cPmW+-STqkA$)rM+Cit|g#3vs~-VxyueF4*W?D@qr}jSKdTY80o#2kKx%B7?KD*3#7B3DbwVt)a#o z)qhNh@w8ky^y!=h`*onF@4>})8hDL{JIU;Z1ZU+)js0jt?_8+3xK`FKq1v|t5lM2E25?y zvk&o3{w0=SWgm)fBh=h2*{WvaZhsDjSj%ursO-dZj~M9dWDWE6wx1#vyKwqLcysyV z+8@TOZ=P#y>jj-Vx~j#riyWAcID530YbsM#Y?!*ojm_ghB&J#yMt;%hn%?89u4h|b z;dvr!PVuhM)5b~Jus)GS%INba@D&eP%tiEJUSKmqgvC1)r-6`1h$P{s4iK}@>+5aq z5lt!9KCFkxR&xkunxj|?dIzt5 zeJ-X^lr;^37>0l~nT4Yuui$BLB5D5R_%2SmuLT*H5=5YQL+qCnq+giwp902kFB^!z zni8?s+;6l|smV*I=5*FAIBymC6wo&fprU*P@dvZTl_~F?AO=$}52ZUu?i&>9b{dlq z7gLkFpj*`30dnQ0rhEVw&)~9wk#z@UfaaQ9&t7_T&8!mwHeV|QrCLuPa9ae>+~yCU zX&pjKZeiI$Uk+~517S2<5aMUrVKmc)Gop5O@ZirE?O%o+JY31I%7>Cuel}JIk5wYT zpQ4T8bGwf1w876nECigaDyJ(11|n}X`l&Qhgiy2<0^jX}aeRIUVaRtHPY_2H+Dt7# zzOSWq;7~T^tvCYy!MYHN@qZUU)M*YB%f%;ZfPTUti6a4KX@v-DUwH+WL;l#Uv!6hS1XHZAAOA8gk+Ztd}j2i>$C68Q$Si?#tpY0MuZ zJatH`vK91^Mp@ap$%8-NVVq6?r=uMB^W_)mKS|-Vb5G;NUsfmmd(T2*`j6{18XGhz zt$7dbIA0)6MqTcgo8!15r`N$hbCg6to|Z0Zau*{R?G>LVJJe2fOAM;)j%zb#VLKf} zQ!cpHA3^h7bPD^wBk$*Z+#E6R4)fSw5c#YoA~lVTYd)!g8*ZNG(O57f7QiueA6kJJ z(VZ15TNmJVUs-RttZsA_ad_WBeHk_!rS#sIrVLg(n>2Q#D3#>j1|<<%jsseIQ-D{&BpVHpwwKR(W`Dj z7o5sxMQDs6le|{!0-DVT1mdPcAci6+>4*+Z7tY&?vsPh*kY9+Sp(TAh6Opq1OHG(> zg#pelr5i1Sc_Cs+0t0++JvFBih=CsbZ`xvfYMg)wdbs_^12pNqP|t7=oz~=vF`m8R zVNVr~z!8{{5CsvT%!~4_CHhhzPzY~HMU!K}xk5lcD;p=nc`xOJ7jDW?ikm^+10{+2 zfZCaUpuF$ua7oPb79mVqB|7GIzURRow=nBr^V~6Zh6tB=VeR?%KU%#i*UA-Z=Wld> zux;YljrwUs!P(F{YDFV=&Dc@BJLvV=bg0w;(Tw*er^z`|wsL7f3niW^i$&L=T`0U|X8CyN|yQpKU`cdG+Pp$Y@)x~Mj^D3!T z1Tt1y&Day3@`*N@bSK7v5PRQjOMMDOH{(*e5V(EJ#_PM#K1y)VXx=BiYLWawx4?ru z&{l_y_SgmqM&lR)OFrKdbTN|P4G1`0P}X8NUkw3H?d_s;|s zRXI57^B?YAI7%vF`z>55Ra5}%9C^90SM;=wA-tmF(U%W5>C0zJ2Ex8~f%9e$;?zt> zfIBy0e|xFDe1F-^V>bID$K|@5L61~;_bBLxjK+#gZ^ulH0XdT$m@VS5UZe~sy-8eJ zuF$SjIcQWSd7OFeKVbg3MmuoLN4c*Wkw~%O)bMOm5 zsbQHQ5iOd&X^G%@N{`i%*YkyD-1EOlWkemU3%uh`@NfP#foxe({?&T$XUoLdjqw-% zX&vB3wBT-dfES9|Z<_zSo*|<7pXWZWN0bn?$D;A}b>hFTfNJ~a8y|R&)XsKG%8(j~ z_byXzOtIY~vs^f?vGgbwmx+|CA1)#zeG%tL5T6kQ2bDrGrbB4Z3%EqDgnwy)zzhIY znFc)Z$~tXXpmIs2OuKT$S;j12k{^Hhz(2|6PB_+&o}^GZaJ;W`qz#^{L&N4e-+=I{ zH#B0aQ2MJR9nqaV9QdOv$q!>ZJsi`ZZ4!>F6=Kv3_i#FMvQeuH)IU-ZTtUpo%)ZK2 z6-kk=og6`BqW;Ty}#%X5Lm2 za|_Bb8YIb1?{I` z*;xkkn3~6tdbONp*3piXLhBQhXi|r6uikTNK}%yC zf7-99599WtxK|GcYT_q;5b)X~s3GNGNGbG%nmI#XHK(l)Hl3+=c9@wuzpC(_`F(8a z9V@e$DKa&KP-n;EQZrT>Eokq9S1%pcf?-W0!cXBdzig5Y&RJpgB0|G(J%ok( zm_J3dw|L;9rx1Cz1FaqC{hK=n5nMa0#n#FG0Y&BFPIB@7B^q*OH~6;cC==D!7b(s% z23Q5RU?2I~X=$IHbrb1r1Q_Y+25R;;@Z7qIy7eR(yz>8lGKRkQ!^o3up>es)xc55X$g5T6*W7HeQ_ZEnzZggt>nwhhgL#?!I!?sbCT^#&N z6nVVdK$8k1V0+3GS??pPP|CbHt)!72r?q&bhk6+mp(-g37J&+%{rdn)rr0F@^ey10 zk%5~qlkSJj;xFbqn$q`8E60DA4r}tWxWHR8HMu5MjlX=Orc6^2H|B+82ki=`7m~9t zDfb-ssR%2B@KBFGweES@l!OSwV)d7a&eSAONNFiocuF$6X8a88-oA9*Ors(OcyI7e zuKX`Fp64q9>XQ{814NbiJ=A!=K&Wvqtk6e6aBYVgkMTMO(j9yZHtLgL^Fp!}Wj2^jT2 zH#3Q|izL|&xrLTM$=JAoZ9mJ?mVB{z7P7jc$m8xH8$&*K^vGA|njNlLt66N#+DDsz zt#cZUYpdlNbc%?=!uz>0-xL(9dvH8VL2I|#eCEpWLywQ?E9Xh(M((TlqkAD3 zR$TOrrY9i@81(v9Wfhxi_~%i4w_KblbXYt&+sZk4vpoQS6LpXnd##_ z=N!&+ws}J^Dmmv^s~K0YnoyDkOUb?In9mlm7|ygyOS1d@d3*<2DwZ3-A3s_D=&Z9 z9BM4~uknq4!3EO8{fja+ox#)MWqFU&{t;X3Rq9Xe_CN1W-u15)f3hQH$e(EKufm`F z&|bLyR3?o(Vm|D&vEDy@{^A<`sSEQs!#v#F+!pF-ZLBwDs2?_7Ub%>i-{rK>{;1NT z&7yT(ycNDzdw!m94Y4QR!Nzq_v@5#oDqs+YxD9a7`&{oB z=X+9AZ&LE-dPi?d-Ot@fc;KeP;E#8KhH>p?6j~VEk1!8;&COWuVV?Wu25h@95A49B 
(binary patch data omitted)
[GIT binary patch data omitted]

diff --git a/wavefront/client/public/logo.svg b/wavefront/client/public/logo.svg
new file mode 100644
index 00000000..19dd1eb9
--- /dev/null
+++ b/wavefront/client/public/logo.svg
@@ -0,0 +1,9 @@
+[SVG markup stripped in extraction; not recoverable]
diff --git a/wavefront/client/server.cjs b/wavefront/client/server.cjs
new file mode 100644
index 00000000..a296ad6f
--- /dev/null
+++ b/wavefront/client/server.cjs
@@ -0,0 +1,82 @@
+const compression = require('compression');
+const crypto = require('crypto');
+const express = require('express');
+const path = require('path');
+
+const PORT = process.env.PORT || 3000;
+const NAMESPACE = process.env.NAMESPACE || 'staging-aws';
+const BASE_URL = process.env.BASE_URL || `https://${NAMESPACE}.rootflo.ai/wavefront`;
+const APP_ENV = process.env.APP_ENV || 'production';
+const FEATURE_API_SERVICES = process.env.FEATURE_API_SERVICES || 'false';
+
+const app = express();
+
+// Enable compression for all responses
+app.use(compression());
+
+const staticOptions = {
+  maxAge: '7d',
+  etag: true,
+  lastModified: true,
+};
+
+app.get('/config.js', (req, res) => {
+  const config = {
+    BASE_URL,
+    APP_ENV,
+    FEATURE_API_SERVICES,
+  };
+
+  res.setHeader('Content-Type', 'application/javascript');
+  res.set({
+    'X-Content-Type-Options': 'nosniff',
+    'Strict-Transport-Security': 'max-age=63072000; includeSubDomains; preload',
+  });
+  res.send(`window.__APP_CONFIG__ = ${JSON.stringify(config)};`);
+});
+
+app.use((req, res, next) => {
+  if (req.url === '/' || req.url.endsWith('index.html')) {
+    // Stop caching index.html
+    return next();
+  }
+  res.set({
+    'X-Content-Type-Options': 'nosniff',
+    'Strict-Transport-Security': 'max-age=63072000; includeSubDomains; preload',
+  });
+  express.static(path.join(__dirname, 'dist'), staticOptions)(req, res, next);
+});
+
+app.get('*', (req, res) => {
+  // Generate nonce for CSP
+  const nonce = crypto.randomBytes(16).toString('base64');
+  const defaultSrc = `'self'`;
+  const scriptSrc = `'self' 'nonce-${nonce}'`;
+  const styleSrc = `'self' 'unsafe-inline'`;
+  const mediaSrc = `'self' https://storage.googleapis.com https://*.s3.amazonaws.com`;
+  const frameAncestors = `'none'`;
+  const imgSrc = `'self' https://storage.googleapis.com data:`;
+
+  res.set({
+    'Cache-Control': 'no-store, no-cache, must-revalidate, private',
+    Pragma: 'no-cache',
+    Expires: '0',
+    'Content-Security-Policy': `default-src ${defaultSrc}; script-src ${scriptSrc}; style-src ${styleSrc}; frame-ancestors ${frameAncestors}; img-src ${imgSrc}; media-src ${mediaSrc}`,
+    'X-Content-Type-Options': 'nosniff',
+    'Strict-Transport-Security': 'max-age=63072000; includeSubDomains; preload',
+  });
+  res.setHeader('X-Frame-Options', 'DENY');
+
+  // Read the index.html file
+  const indexPath = path.join(__dirname, 'dist', 'index.html');
+  const indexContent = require('fs').readFileSync(indexPath, 'utf8');
+
+  // Add nonce to script tags
+  const modifiedContent = indexContent.replace(
+    /<script/g,
+    `<script nonce="${nonce}"`
+  );
+
+  res.send(modifiedContent);
+});
+
+app.listen(PORT, () => {
+  console.log(`Server listening on port ${PORT}`);
+});
[diff for an HTML entry page follows; markup stripped in extraction — recoverable text: page title "Rootflo Console"]
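For reference, the nonce handling in server.cjs above boils down to: mint a fresh nonce per request, advertise it in the Content-Security-Policy header, and stamp it onto every script tag in the HTML before sending. A minimal TypeScript sketch, assuming only Node's crypto module; the injectNonce helper and the sample HTML are illustrative, not part of this patch:

import { randomBytes } from 'crypto';

// Illustrative helper (not part of the patch): pairs a rewritten HTML
// document with the CSP header value that authorizes its scripts.
function injectNonce(html: string): { html: string; csp: string } {
  const nonce = randomBytes(16).toString('base64');
  return {
    // Only scripts carrying this exact nonce attribute will execute.
    html: html.replace(/<script/g, `<script nonce="${nonce}"`),
    csp: `default-src 'self'; script-src 'self' 'nonce-${nonce}'`,
  };
}

// Usage: send `html` as the response body and `csp` as the
// Content-Security-Policy header, regenerating both on every request.
const { html, csp } = injectNonce('<script src="/app.js"></script>');
console.log(csp);
console.log(html);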
+[remaining stripped HTML lines omitted]
\ No newline at end of file
diff --git a/wavefront/client/public/favicon.png b/wavefront/client/public/favicon.png
new file mode 100644
index 0000000000000000000000000000000000000000..d82b7e066f671c473ed20ede2991dd4e65679419
GIT binary patch
literal 386
[binary patch data omitted]
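Between the static assets above and the build changes below, it is worth spelling out how the client consumes the runtime configuration that /config.js emits. A short TypeScript sketch, assuming the window.__APP_CONFIG__ shape assembled in server.cjs; the AppConfig name and the localhost fallback are illustrative, not part of this patch:

// Shape mirrors the config object assembled in server.cjs above.
interface AppConfig {
  BASE_URL: string;
  APP_ENV: string;
  FEATURE_API_SERVICES: string;
}

declare global {
  interface Window {
    __APP_CONFIG__?: AppConfig;
  }
}

// Runtime config is a fallback; build-time values win, matching the
// `import.meta.env.VITE_* || getConfig().*` pattern in src/config/env.ts.
export const getConfig = (): AppConfig =>
  window.__APP_CONFIG__ ?? {
    BASE_URL: 'http://localhost:8001', // illustrative fallback only
    APP_ENV: 'local',
    FEATURE_API_SERVICES: 'false',
  };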
From ea78b75515db47dac677fd7a389bc7fadbec3d29 Mon Sep 17 00:00:00 2001
From: vishnu r kumar
Date: Tue, 9 Dec 2025 18:46:18 +0530
Subject: [PATCH 08/13] chore: add dockerignore

---
 .dockerignore                                  | 17 +++++++++++++++++
 .../workflows/build-wavefront-web-develop.yaml |  2 +-
 .gitignore                                     |  1 +
 .../client/Dockerfile                          |  0
 4 files changed, 19 insertions(+), 1 deletion(-)
 create mode 100644 .dockerignore
 rename docker/wavefront-web.Dockerfile => wavefront/client/Dockerfile (100%)

diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..79b81821
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,17 @@
+**/*/node_modules
+**/*/dist
+**/*/.git
+**/*/.github
+**/*/.gitignore
+**/*/.env
+**/*/.env.staging
+**/*/.env.production
+**/*/.prettierrc.json
+**/*/eslint.config.js
+**/*/.husky
+**/*/.vscode
+**/*/.yarn
+**/*/.yarnrc
+.yarnrc.yml
+README.md
+extras
\ No newline at end of file
diff --git a/.github/workflows/build-wavefront-web-develop.yaml b/.github/workflows/build-wavefront-web-develop.yaml
index 4e9d91db..270758e3 100644
--- a/.github/workflows/build-wavefront-web-develop.yaml
+++ b/.github/workflows/build-wavefront-web-develop.yaml
@@ -43,7 +43,7 @@ jobs:
       - name: Build Docker Image
         id: build-image
        run: |
-          docker build -f docker/wavefront-web.Dockerfile -t rootflo:${{ steps.get-commit-hash.outputs.commit-hash }}-${{ steps.get-timestamp.outputs.timestamp }} .
+          docker build -f wavefront/client/Dockerfile -t rootflo:${{ steps.get-commit-hash.outputs.commit-hash }}-${{ steps.get-timestamp.outputs.timestamp }} .
echo "IMAGE_TAG=${{ steps.get-commit-hash.outputs.commit-hash }}-${{ steps.get-timestamp.outputs.timestamp }}" >> $GITHUB_ENV - id: "Auth-to-GCP" diff --git a/.gitignore b/.gitignore index ca51f24b..27a02c09 100644 --- a/.gitignore +++ b/.gitignore @@ -14,4 +14,5 @@ bin examples/local/* .logs node_modules/ +.vite flo_ai/usecases diff --git a/docker/wavefront-web.Dockerfile b/wavefront/client/Dockerfile similarity index 100% rename from docker/wavefront-web.Dockerfile rename to wavefront/client/Dockerfile From 832546b372c5ccc5630740ea39dc7842026ff96a Mon Sep 17 00:00:00 2001 From: vishnu r kumar Date: Thu, 11 Dec 2025 15:35:52 +0530 Subject: [PATCH 09/13] chore: update app creation flow --- wavefront/client/src/api/app-service.ts | 2 +- .../client/src/components/DashboardLayout.tsx | 36 +---- .../client/src/components/topbar/Topbar.tsx | 81 ++++------ wavefront/client/src/config/env.ts | 10 +- .../src/hooks/data/mutation-functions.ts | 12 +- wavefront/client/src/pages/apps/create.tsx | 139 +++++++++++------- .../client/src/pages/apps/edit/[appId].tsx | 129 +++++++--------- wavefront/client/src/pages/apps/schemas.ts | 61 +------- wavefront/client/src/pages/login/index.tsx | 12 +- wavefront/client/src/router/index.tsx | 6 - wavefront/client/src/types/app.ts | 18 +-- 11 files changed, 201 insertions(+), 305 deletions(-) diff --git a/wavefront/client/src/api/app-service.ts b/wavefront/client/src/api/app-service.ts index 09f40039..18beb230 100644 --- a/wavefront/client/src/api/app-service.ts +++ b/wavefront/client/src/api/app-service.ts @@ -38,7 +38,7 @@ export class AppService { appId: string, appData: UpdateAppRequest ): Promise { - const response: IApiResponse = await this.http.put( + const response: IApiResponse = await this.http.patch( `/v1/apps/${appId}`, appData ); diff --git a/wavefront/client/src/components/DashboardLayout.tsx b/wavefront/client/src/components/DashboardLayout.tsx index c58565e9..42efd643 100644 --- a/wavefront/client/src/components/DashboardLayout.tsx +++ b/wavefront/client/src/components/DashboardLayout.tsx @@ -1,6 +1,6 @@ import Topbar from "@app/components/topbar/Topbar"; -import { App } from "@app/types/app"; import { CURRENT_PATH_KEY } from "@app/lib/constants"; +import { App } from "@app/types/app"; import { useCallback, useEffect, useRef } from "react"; import { Outlet, useLocation, useNavigate } from "react-router"; @@ -11,30 +11,7 @@ interface IUser { id: string; } -interface LayoutConfig { - logo?: { - logo: string | null; - alt: string; - width?: number; - }; -} - -const DashboardLayout = ({ - user, - config, - customTitle, - poweredByFlag = true, - hideSidebar = false, - apps = [], -}: { - user: IUser; - config?: LayoutConfig; - customTitle?: string; - poweredByFlag?: boolean; - hideSidebar?: boolean; - apps: App[]; -}) => { - const { logo = { logo: null, alt: "Logo", width: 140 } } = config || {}; +const DashboardLayout = ({ user, apps = [] }: { user: IUser; apps: App[] }) => { const currentPath = useLocation(); const navigate = useNavigate(); const timeoutRef = useRef(null); @@ -124,14 +101,7 @@ const DashboardLayout = ({ return (
-      <Topbar
-        user={user}
-        customTitle={customTitle}
-        poweredByFlag={poweredByFlag}
-        logo={logo}
-        hideSidebar={hideSidebar}
-        apps={apps}
-      />
+      <Topbar user={user} apps={apps} />
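The simplified contract between DashboardLayout and Topbar after this hunk can be written out as a props type. A minimal TypeScript sketch, assuming the IUser and App shapes shown elsewhere in this patch; the TopbarProps name is illustrative, not an export of the patch itself:

import type { App } from "@app/types/app";

// Shape as declared in DashboardLayout.tsx in this patch.
interface IUser {
  email: string;
  first_name: string;
  last_name: string;
  id: string;
}

// Illustrative name: the narrowed surface Topbar now accepts.
interface TopbarProps {
  user: IUser;
  apps: App[]; // drives the app selector in the topbar
}

// After this commit the layout renders <Topbar user={user} apps={apps} />;
// title, logo, and powered-by branding are handled inside Topbar itself.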
diff --git a/wavefront/client/src/components/topbar/Topbar.tsx b/wavefront/client/src/components/topbar/Topbar.tsx index 2f303f8c..93212923 100644 --- a/wavefront/client/src/components/topbar/Topbar.tsx +++ b/wavefront/client/src/components/topbar/Topbar.tsx @@ -1,3 +1,4 @@ +import { RootfloIcon } from "@app/assets/icons"; import { DropdownMenu, DropdownMenuContent, @@ -14,39 +15,30 @@ import { SelectValue, } from "@app/components/ui/select"; import { IUser } from "@app/pages/types"; +import { useAuthStore } from "@app/store"; import { useDashboardStore } from "@app/store/dashboard-store"; import { App } from "@app/types/app"; -import { Link2Icon, UserIcon } from "lucide-react"; -import { useEffect, useState } from "react"; +import { UserIcon } from "lucide-react"; +import { useEffect } from "react"; import { useNavigate } from "react-router"; -const Topbar = ({ - user, - customTitle, - poweredByFlag = true, - logo, - hideSidebar = false, - apps = [], -}: { - user: IUser; - customTitle?: string; - poweredByFlag?: boolean; - logo?: { - logo: string | null; - alt: string; - width?: number; - }; - hideSidebar: boolean; - apps: App[]; -}) => { - const [imageLoading, setImageLoading] = useState(true); +const Topbar = ({ user, apps = [] }: { user: IUser; apps: App[] }) => { const { selectedApp, setSelectedApp } = useDashboardStore(); + const { authenticated } = useAuthStore(); const navigate = useNavigate(); const handleLogout = () => { navigate("/logout"); }; + const handleNavIconClick = () => { + if (authenticated) { + navigate("/apps"); + } else { + navigate("/login"); + } + }; + useEffect(() => { const pageUrl = new URL(window.location.href); const appId = pageUrl.pathname.split("/")[2]; @@ -55,42 +47,21 @@ const Topbar = ({ return (
-      {hideSidebar && (
-        <div onClick={() => navigate("/")}>
-          {logo && logo.logo && (
-            <>
-              {imageLoading && (
-                <div />
-              )}
-              <img
-                src={logo.logo}
-                alt="Uploaded preview"
-                width={logo.width}
-                onLoad={() => setImageLoading(false)}
-              />
-            </>
-          )}
-        </div>
-      )}
+      <div onClick={handleNavIconClick}>
+        <RootfloIcon />
+      </div>
       <div>
-        <div>{customTitle}</div>
-        {poweredByFlag && <div>|</div>}
-        {poweredByFlag && (
-          <div>
-            <div>Powered by</div>
-            <RootfloIcon />
-          </div>
-        )}
+        <div>AI Middleware</div>
+        {/* <div>|</div> */}
+        {/* <div>
+          <div>Powered by</div>
+          <RootfloIcon />
+        </div> */}
       </div>
{apps && ( diff --git a/wavefront/client/src/config/env.ts b/wavefront/client/src/config/env.ts index 8062ee6a..5f519da1 100644 --- a/wavefront/client/src/config/env.ts +++ b/wavefront/client/src/config/env.ts @@ -3,12 +3,16 @@ export const getConfig = () => { }; const baseURL = import.meta.env.VITE_BASE_URL || getConfig().BASE_URL; -const isStaging = import.meta.env.VITE_APP_ENV || getConfig().APP_ENV; +const env = import.meta.env.VITE_APP_ENV || getConfig().APP_ENV; const isApiServicesEnabled = - import.meta.env.VITE_FEATURE_API_SERVICES === 'true' || getConfig().FEATURE_API_SERVICES === 'true'; + import.meta.env.VITE_FEATURE_API_SERVICES === "true" || + getConfig().FEATURE_API_SERVICES === "true"; export const appEnv = { baseURL, - isStaging: isStaging === 'staging', + isLocal: env === "local", + isDev: env === "development", + isStaging: env === "staging", + isProd: env === "production", isApiServicesEnabled, }; diff --git a/wavefront/client/src/hooks/data/mutation-functions.ts b/wavefront/client/src/hooks/data/mutation-functions.ts index 716be949..f00da2a7 100644 --- a/wavefront/client/src/hooks/data/mutation-functions.ts +++ b/wavefront/client/src/hooks/data/mutation-functions.ts @@ -20,16 +20,14 @@ export const updateAgentMutationFn = async ({ export const updateAppFn = async (data: { appId: string; appName: string; - appUrl: string; - appKey: string; - appSecret: string; + public_url: string; + private_url: string; }) => { - const { appId, appName, appUrl, appKey, appSecret } = data; + const { appId, appName, public_url, private_url } = data; const response = await floConsoleService.appService.updateApp(appId, { app_name: appName, - app_url: appUrl, - app_key: appKey, - app_secret: appSecret, + public_url: public_url, + private_url: private_url, }); return response.data; }; diff --git a/wavefront/client/src/pages/apps/create.tsx b/wavefront/client/src/pages/apps/create.tsx index 0ace0255..036b7623 100644 --- a/wavefront/client/src/pages/apps/create.tsx +++ b/wavefront/client/src/pages/apps/create.tsx @@ -24,6 +24,8 @@ import { useForm } from "react-hook-form"; import { useNavigate } from "react-router"; import { z } from "zod"; import { createAppSchema } from "./schemas"; +import { Checkbox } from "@app/components/ui/checkbox"; +import { appEnv } from "@app/config/env"; type TCreateAppInputSchema = z.infer; @@ -40,7 +42,7 @@ const CreateApp: React.FC = () => { const form = useForm({ resolver: zodResolver(createAppSchema), defaultValues: { - deployment_type: "auto", + deployment_type: "manual", }, }); @@ -79,7 +81,7 @@ const CreateApp: React.FC = () => { // Effect to handle polling useEffect(() => { - if (pollingAppId) { + if (pollingAppId && deploymentType === "auto") { // Start polling immediately pollAppStatus(pollingAppId); @@ -96,7 +98,7 @@ const CreateApp: React.FC = () => { pollingIntervalRef.current = null; } }; - }, [pollingAppId, pollAppStatus]); + }, [pollingAppId, pollAppStatus, deploymentType]); const appCreationSubmit = async (formData: TCreateAppInputSchema) => { setCreating(true); @@ -105,16 +107,22 @@ const CreateApp: React.FC = () => { const appData: CreateAppRequest = { app_name: formData.app_name, deployment_type: formData.deployment_type, - app_url: formData.app_url || "", - app_secret: formData.app_secret || "", - app_key: formData.app_key || "", + public_url: formData.public_url, + private_url: formData.private_url, }; const response = await floConsoleService.appService.createApp(appData); if (response.data?.data?.app.status === "in_progress") { - // Start 
polling for status updates - setPollingAppId(response.data.data.app.id); + // Start polling for status updates only if deployment type is auto + if (formData.deployment_type === "auto") { + setPollingAppId(response.data.data.app.id); + } else { + // For manual deployment, just show success + setCreating(false); + notifySuccess("App created successfully"); + navigate("/apps"); + } } else if (response.data?.data?.app.status === "success") { // If already successful, show success immediately setCreating(false); @@ -132,6 +140,18 @@ const CreateApp: React.FC = () => { navigate("/apps"); }; + const handleAddLocalApp = () => { + form.setValue("app_name", "localhost"); + form.setValue("public_url", "http://localhost:8001"); + form.setValue("private_url", "http://localhost:8001"); + }; + + const handleRemoveLocalApp = () => { + form.setValue("app_name", ""); + form.setValue("public_url", ""); + form.setValue("private_url", ""); + }; + return (
@@ -139,11 +159,32 @@ const CreateApp: React.FC = () => { onSubmit={form.handleSubmit(appCreationSubmit)} className="flex w-full max-w-[940px] flex-col gap-16 rounded-2xl bg-white p-8 shadow-[0_4px_40px_0_rgba(0,0,0,0.04)]" > -
-

Create new app

-

- Add a new application to the console -

+
+
+

+ Create new app +

+

+ Add a new application to the console +

+
+ {appEnv.isLocal && ( + + )}
@@ -173,7 +214,11 @@ const CreateApp: React.FC = () => { - + Auto @@ -187,53 +232,35 @@ const CreateApp: React.FC = () => { />
-
-
-
- ( - - App URL - - - - - - )} - /> - ( - - App Key - - - - - - )} - /> -
+
+
+ ( + + Public URL + + + + + + )} + /> ( - App Secret + Private URL - + diff --git a/wavefront/client/src/pages/apps/edit/[appId].tsx b/wavefront/client/src/pages/apps/edit/[appId].tsx index 606edab6..53a898b1 100644 --- a/wavefront/client/src/pages/apps/edit/[appId].tsx +++ b/wavefront/client/src/pages/apps/edit/[appId].tsx @@ -20,24 +20,22 @@ import { useNotifyStore } from "@app/store"; import { zodResolver } from "@hookform/resolvers/zod"; import { useQueryClient } from "@tanstack/react-query"; import { X } from "lucide-react"; -import React, { useEffect, useState } from "react"; +import React, { useEffect } from "react"; import { useForm } from "react-hook-form"; import { useNavigate, useParams } from "react-router"; import { z } from "zod"; -import { baseAppSchema } from "../schemas"; +import { createAppSchema } from "../schemas"; -type TEditAppInputSchema = z.infer; +type TEditAppInputSchema = z.infer; const EditApp: React.FC = () => { const { appId } = useParams<{ appId: string }>(); const navigate = useNavigate(); const { notifySuccess, notifyError } = useNotifyStore(); - const [updating, setUpdating] = useState(false); - const { data: response } = useGetAppById(appId!, !!appId); const queryClient = useQueryClient(); - const { mutate: updateApp } = useUpdateApp( + const { mutate: updateApp, isPending: isUpdating } = useUpdateApp( queryClient, notifySuccess, notifyError @@ -49,13 +47,12 @@ const EditApp: React.FC = () => { response; const form = useForm({ - resolver: zodResolver(baseAppSchema), + resolver: zodResolver(createAppSchema), defaultValues: { deployment_type: "auto" as "manual" | "auto", app_name: "", - app_url: "", - app_key: "", - app_secret: "", + public_url: "", + private_url: "", }, }); @@ -66,27 +63,23 @@ const EditApp: React.FC = () => { deployment_type: (appData.deployment_type as "manual" | "auto") || "auto", app_name: appData.app_name || "", - app_url: appData.app_url || "", - app_key: appData.app_key || "", - app_secret: "", // Don't populate secret for security + public_url: appData.public_url || "", + private_url: appData.private_url || "", }); } }, [appData, form]); const handleEditAppSubmit = async (formData: TEditAppInputSchema) => { - setUpdating(true); try { updateApp({ appId: appId!, appName: formData.app_name, - appUrl: formData.app_url!, - appKey: formData.app_key!, - appSecret: formData.app_secret!, + public_url: formData.public_url!, + private_url: formData.private_url!, }); + navigate(`/apps`); } catch (error) { console.error("Error updating app:", error); - } finally { - setUpdating(false); } }; @@ -157,70 +150,48 @@ const EditApp: React.FC = () => { )} />
- {appData?.deployment_type === "manual" && ( - <> -
- ( - - App URL - - - - - - )} - /> - ( - - App Key - - - - - - )} - /> -
-
- ( - - App Secret (Optional) - - - - - - )} - /> -
- - )} + +
+ ( + + Public URL + + + + + + )} + /> + ( + + Private URL + + + + + + )} + /> +
- -
diff --git a/wavefront/client/src/pages/apps/schemas.ts b/wavefront/client/src/pages/apps/schemas.ts index 5d62e287..8126d526 100644 --- a/wavefront/client/src/pages/apps/schemas.ts +++ b/wavefront/client/src/pages/apps/schemas.ts @@ -1,62 +1,17 @@ -import { z } from 'zod'; +import { z } from "zod"; -export const baseAppSchema = z.object({ +export const createAppSchema = z.object({ app_name: z .string() - .min(2, { message: 'App name must be at least 2 characters long' }) + .min(2, { message: "App name must be at least 2 characters long" }) .regex(/^[a-zA-Z0-9_-]+$/, { - message: 'App name can only contain letters, numbers, underscores, and hyphens (no spaces)', + message: + "App name can only contain letters, numbers, underscores, and hyphens (no spaces)", }), - deployment_type: z.enum(['manual', 'auto']), + deployment_type: z.enum(["manual", "auto"]), // Keep these purely optional โ€” no validation here - app_url: z.string().optional(), - app_secret: z.string().optional(), - app_key: z.string().optional(), -}); - -export const createAppSchema = baseAppSchema.superRefine((data, ctx) => { - if (data.deployment_type === 'manual') { - // validate app_url - if (!data.app_url) { - ctx.addIssue({ - code: z.ZodIssueCode.custom, - message: 'App url is required for manual deployment', - path: ['app_url'], - }); - } else { - // URL validation only if provided - const urlSchema = z - .string() - .url() - .regex(/^https?:\/\/[^\s/$.?#].[^\s]*$/i); - const result = urlSchema.safeParse(data.app_url); - if (!result.success) { - ctx.addIssue({ - code: z.ZodIssueCode.custom, - message: 'Invalid URL format', - path: ['app_url'], - }); - } - } - - // validate secret - if (!data.app_secret) { - ctx.addIssue({ - code: z.ZodIssueCode.custom, - message: 'App secret is required for manual deployment', - path: ['app_secret'], - }); - } - - // validate key - if (!data.app_key) { - ctx.addIssue({ - code: z.ZodIssueCode.custom, - message: 'App key is required for manual deployment', - path: ['app_key'], - }); - } - } + public_url: z.string().url(), + private_url: z.string().url(), }); diff --git a/wavefront/client/src/pages/login/index.tsx b/wavefront/client/src/pages/login/index.tsx index 524b7a66..bd79355c 100644 --- a/wavefront/client/src/pages/login/index.tsx +++ b/wavefront/client/src/pages/login/index.tsx @@ -1,5 +1,6 @@ import floConsoleService from "@app/api"; import { RootfloIcon } from "@app/assets/icons"; +import aiCircle from "@app/assets/images/ai_circle.png"; import { Button } from "@app/components/ui/button"; import { Form, @@ -15,11 +16,10 @@ import { validationMessage } from "@app/utils/form-validation"; import { emailRegex } from "@app/utils/regex"; import { zodResolver } from "@hookform/resolvers/zod"; import { EyeIcon, EyeOffIcon } from "lucide-react"; -import { useState } from "react"; +import { useEffect, useState } from "react"; import { useForm } from "react-hook-form"; import { Link, useNavigate } from "react-router"; import { z } from "zod"; -import aiCircle from "@app/assets/images/ai_circle.png"; export const LoginSchema = z.object({ email: z @@ -67,6 +67,14 @@ const Login = () => { } }; + useEffect(() => { + const token = localStorage.getItem(TOKEN_KEY); + if (token) { + setAuthenticatedState(true); + navigate("/apps"); + } + }, []); + return (
{ user={ user || { first_name: "", last_name: "", email: "", id: "" } } - customTitle="AI Middleware" - poweredByFlag={false} - config={{ - logo: { logo: "/logo.svg", alt: "AI Middleware", width: 90 }, - }} - hideSidebar={true} apps={apps} /> } diff --git a/wavefront/client/src/types/app.ts b/wavefront/client/src/types/app.ts index bc9b8089..bd9c4540 100644 --- a/wavefront/client/src/types/app.ts +++ b/wavefront/client/src/types/app.ts @@ -4,27 +4,25 @@ import { IApiResponse } from "@app/lib/axios"; export interface App { id: string; app_name: string; - app_url: string; - app_key: string; created_at: string; - updated_at: string | null; - status: string; config: Record; + public_url: string; + private_url: string; + status: string; + updated_at: string | null; } export interface CreateAppRequest { app_name: string; - app_url: string; - app_secret: string; - app_key: string; deployment_type: string; + public_url: string; + private_url: string; } export interface UpdateAppRequest { app_name?: string; - app_url?: string; - app_secret?: string; - app_key?: string; + public_url?: string; + private_url?: string; } export interface AppData { From 03b2bee32a5a77cbd764ba48a56bd50d35652d5d Mon Sep 17 00:00:00 2001 From: vishnu r kumar Date: Thu, 11 Dec 2025 18:21:27 +0530 Subject: [PATCH 10/13] chore: migrate remaining changes from floware --- .../controllers/webhook_controller.py | 6 +- .../call_processing/services/llm_service.py | 8 +- .../services/pipecat_service.py | 108 +++++++++- .../call_processing/services/tts_service.py | 2 - .../apps/call_processing/pyproject.toml | 2 +- .../floconsole/controllers/app_controller.py | 77 ++++--- .../floconsole/controllers/user_controller.py | 195 ++++++++++++++++++ ...63aed0f81_remove_app_key_and_app_secret.py | 49 +++++ .../floconsole/floconsole/db/models/app.py | 13 +- .../floconsole/di/application_container.py | 6 + .../floconsole/services/app_service.py | 10 +- .../services/floware_proxy_service.py | 52 +---- .../floconsole/services/user_service.py | 40 ++++ .../server/apps/floware/floware/server.py | 3 +- wavefront/server/uv.lock | 9 +- 15 files changed, 465 insertions(+), 115 deletions(-) create mode 100644 wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_12_09_1328-85a63aed0f81_remove_app_key_and_app_secret.py create mode 100644 wavefront/server/apps/floconsole/floconsole/services/user_service.py diff --git a/wavefront/server/apps/call_processing/call_processing/controllers/webhook_controller.py b/wavefront/server/apps/call_processing/call_processing/controllers/webhook_controller.py index 08230762..8a34655f 100644 --- a/wavefront/server/apps/call_processing/call_processing/controllers/webhook_controller.py +++ b/wavefront/server/apps/call_processing/call_processing/controllers/webhook_controller.py @@ -15,6 +15,9 @@ # Pipecat imports for WebSocket handling from pipecat.runner.types import WebSocketRunnerArguments from pipecat.runner.utils import parse_telephony_websocket + +# from pipecat.audio.turn.smart_turn.local_smart_turn_v3 import LocalSmartTurnAnalyzerV3 +# from pipecat.audio.turn.smart_turn.base_smart_turn import SmartTurnParams from pipecat.serializers.twilio import TwilioFrameSerializer from pipecat.audio.vad.silero import SileroVADAnalyzer from pipecat.audio.vad.vad_analyzer import VADParams @@ -162,11 +165,12 @@ async def websocket_endpoint( params=VADParams( confidence=0.7, # Default is 0.7, can lower to 0.4-0.5 for faster detection start_secs=0.15, # Default is 0.2, keep it - stop_secs=0.5, # KEY: Lower from default 
0.8 for faster cutoff + stop_secs=0.8, # KEY: Lower from default 0.8 for faster cutoff (should be 0.2 for smart turn detection) min_volume=0.6, # Default is 0.6, adjust based on your audio quality ), ), # Voice Activity Detection serializer=serializer, + # turn_analyzer=LocalSmartTurnAnalyzerV3(params=SmartTurnParams()), ), ) diff --git a/wavefront/server/apps/call_processing/call_processing/services/llm_service.py b/wavefront/server/apps/call_processing/call_processing/services/llm_service.py index 790ac6b7..6bf2b9b4 100644 --- a/wavefront/server/apps/call_processing/call_processing/services/llm_service.py +++ b/wavefront/server/apps/call_processing/call_processing/services/llm_service.py @@ -25,7 +25,7 @@ def create_llm_service(llm_config: Dict[str, Any]): Args: llm_config: { - 'type': 'openai' | 'anthropic' | 'google' | 'groq', + 'type': 'openai' | 'anthropic' | 'gemini' | 'groq', 'api_key': 'key', 'llm_model': 'gpt-4', 'parameters': { @@ -50,7 +50,7 @@ def create_llm_service(llm_config: Dict[str, Any]): if llm_type == 'openai': return LLMServiceFactory._create_openai_llm(api_key, model, parameters) - elif llm_type == 'google': + elif llm_type == 'gemini': return LLMServiceFactory._create_google_llm(api_key, model, parameters) elif llm_type == 'groq': return LLMServiceFactory._create_groq_llm(api_key, model, parameters) @@ -65,8 +65,6 @@ def _create_openai_llm(api_key: str, model: str, parameters: Dict[str, Any]): if 'temperature' in parameters: params_dict['temperature'] = parameters['temperature'] - if 'max_tokens' in parameters: - params_dict['max_tokens'] = parameters['max_tokens'] if 'max_completion_tokens' in parameters: params_dict['max_completion_tokens'] = parameters['max_completion_tokens'] if 'top_p' in parameters: @@ -121,8 +119,6 @@ def _create_groq_llm(api_key: str, model: str, parameters: Dict[str, Any]): if 'temperature' in parameters: params_dict['temperature'] = parameters['temperature'] - if 'max_tokens' in parameters: - params_dict['max_tokens'] = parameters['max_tokens'] if 'max_completion_tokens' in parameters: params_dict['max_completion_tokens'] = parameters['max_completion_tokens'] if 'top_p' in parameters: diff --git a/wavefront/server/apps/call_processing/call_processing/services/pipecat_service.py b/wavefront/server/apps/call_processing/call_processing/services/pipecat_service.py index 99210627..1eaabea1 100644 --- a/wavefront/server/apps/call_processing/call_processing/services/pipecat_service.py +++ b/wavefront/server/apps/call_processing/call_processing/services/pipecat_service.py @@ -8,9 +8,11 @@ from call_processing.log.logger import logger # Pipecat core imports +from pipecat.adapters.schemas.tools_schema import ToolsSchema from pipecat.audio.interruptions.min_words_interruption_strategy import ( MinWordsInterruptionStrategy, ) +from pipecat.frames.frames import TTSSpeakFrame, EndTaskFrame from pipecat.pipeline.pipeline import Pipeline from pipecat.pipeline.runner import PipelineRunner from pipecat.pipeline.task import PipelineParams, PipelineTask @@ -18,13 +20,106 @@ from pipecat.processors.aggregators.llm_response_universal import ( LLMContextAggregatorPair, ) +from pipecat.processors.frame_processor import FrameProcessor, FrameDirection +from pipecat.processors.user_idle_processor import UserIdleProcessor from pipecat.transports.base_transport import BaseTransport - +from pipecat.services.llm_service import FunctionCallParams from call_processing.services.stt_service import STTServiceFactory from call_processing.services.tts_service import 
TTSServiceFactory from call_processing.services.llm_service import LLMServiceFactory +# Advanced handler with retry logic +async def handle_user_idle(processor: FrameProcessor, retry_count): + if retry_count == 1: + # First attempt - gentle reminder + await processor.push_frame(TTSSpeakFrame('Are you still there?')) + return True # Continue monitoring + elif retry_count == 2: + # Second attempt - more direct prompt + await processor.push_frame( + TTSSpeakFrame('Would you like to continue our conversation?') + ) + return True # Continue monitoring + else: + # Third attempt - end conversation + await processor.push_frame( + TTSSpeakFrame("I'll leave you for now. Have a nice day!") + ) + await processor.push_frame(EndTaskFrame(), FrameDirection.UPSTREAM) + return False # Stop monitoring + + +user_idle = UserIdleProcessor( + callback=handle_user_idle, # Your callback function + timeout=4.0, # Seconds of inactivity before triggering +) + + +async def evaluate_completion_criteria(params: FunctionCallParams): + """ + Check if the last user message contains goodbye-related phrases. + Returns True if goodbye detected, False otherwise. + """ + context = params.context + + # Get the conversation messages + messages = context.get_messages() + + # Find the last user message + last_user_message = None + for message in reversed(messages): + if message.get('role') == 'user': + last_user_message = message.get('content', '').lower() + break + + # If no user message found, conversation is not complete + if not last_user_message: + return False + + # List of goodbye phrases to check + goodbye_phrases = [ + 'goodbye', + 'bye', + 'good bye', + 'see you', + 'talk to you later', + 'ttyl', + 'have a good day', + 'take care', + 'farewell', + 'later', + 'peace out', + ] + + # Check if any goodbye phrase is in the message + return any(phrase in last_user_message for phrase in goodbye_phrases) + + +async def check_conversation_complete(params: FunctionCallParams): + """ + Function to check if conversation should end based on goodbye detection. + """ + # Check if goodbye is present + conversation_complete = await evaluate_completion_criteria(params) + + if conversation_complete: + # Send farewell message + await params.llm.push_frame( + TTSSpeakFrame('Thank you for using our service! 
Goodbye!') + ) + # End the conversation + await params.llm.push_frame(EndTaskFrame(), FrameDirection.UPSTREAM) + + # Return result to LLM + await params.result_callback( + { + 'status': 'complete' if conversation_complete else 'continuing', + 'goodbye_detected': conversation_complete, + } + ) + + class PipecatService: """Service for creating and running Pipecat pipelines""" @@ -61,8 +156,14 @@ async def run_conversation( } ] + # ADD: Register function handler with LLM service + llm.register_function( + 'check_conversation_complete', check_conversation_complete + ) + + tools = ToolsSchema(standard_tools=[check_conversation_complete]) # Create LLM context and aggregator - context = LLMContext(messages) + context = LLMContext(messages, tools=tools) context_aggregator = LLMContextAggregatorPair(context) # Create pipeline @@ -70,6 +171,7 @@ async def run_conversation( [ transport.input(), # Audio input from Twilio stt, # Speech-to-Text + user_idle, context_aggregator.user(), # Add user message to context llm, # LLM processing tts, # Text-to-Speech @@ -90,7 +192,7 @@ async def run_conversation( interruption_strategies=[MinWordsInterruptionStrategy(min_words=2)], # report_only_initial_ttfb=True ), - idle_timeout_secs=12, + idle_timeout_secs=20, # Safety net - allows UserIdleProcessor to complete 3 retries (4s each = 12s total) ) # Register event handlers diff --git a/wavefront/server/apps/call_processing/call_processing/services/tts_service.py b/wavefront/server/apps/call_processing/call_processing/services/tts_service.py index c8ac3a9d..9e0a960d 100644 --- a/wavefront/server/apps/call_processing/call_processing/services/tts_service.py +++ b/wavefront/server/apps/call_processing/call_processing/services/tts_service.py @@ -151,8 +151,6 @@ def _create_cartesia_tts(api_key: str, voice_id: str, parameters: Dict[str, Any] if 'speed' in parameters: params_dict['speed'] = parameters['speed'] - if 'emotion' in parameters: - params_dict['emotion'] = parameters['emotion'] # Create InputParams object (only if we have params) input_params = ( diff --git a/wavefront/server/apps/call_processing/pyproject.toml b/wavefront/server/apps/call_processing/pyproject.toml index 66be64a1..4cb97c09 100644 --- a/wavefront/server/apps/call_processing/pyproject.toml +++ b/wavefront/server/apps/call_processing/pyproject.toml @@ -21,7 +21,7 @@ dependencies = [ "redis>=5.0.0", "tenacity>=8.0.0", # Pipecat and voice processing - "pipecat-ai[websocket,cartesia,google,silero,deepgram,groq,runner]==0.0.91", + "pipecat-ai[websocket,cartesia,google,silero,deepgram,groq,runner]==0.0.97", # Twilio "twilio>=8.0.0", ] diff --git a/wavefront/server/apps/floconsole/floconsole/controllers/app_controller.py b/wavefront/server/apps/floconsole/floconsole/controllers/app_controller.py index 639ff468..d2c95a15 100644 --- a/wavefront/server/apps/floconsole/floconsole/controllers/app_controller.py +++ b/wavefront/server/apps/floconsole/floconsole/controllers/app_controller.py @@ -29,9 +29,8 @@ class CreateAppRequest(BaseModel): app_name: str - app_url: Optional[str] = None - app_secret: Optional[str] = None - app_key: Optional[str] = None + public_url: Optional[str] = None + private_url: Optional[str] = None deployment_type: AppDeploymentType = AppDeploymentType.MANUAL type: str = 'custom' @@ -39,16 +38,15 @@ class CreateAppRequest(BaseModel): class UpdateAppRequest(BaseModel): deployment_type: Optional[str] = None app_name: Optional[str] = None - app_url: Optional[str] = None - app_secret: Optional[str] = None - app_key: Optional[str] = None + 
public_url: Optional[str] = None + private_url: Optional[str] = None class AppResponse(BaseModel): id: str app_name: str - app_url: str - app_key: Optional[str] = None + public_url: str + private_url: str status: AppStatus config: dict deployment_type: str @@ -61,8 +59,8 @@ def from_model(cls, app): return cls( id=str(app.id), app_name=app.app_name, - app_url=app.app_url, - app_key=app.app_key, + public_url=app.public_url, + private_url=app.private_url, status=app.status, config=app.config, deployment_type=app.deployment_type, @@ -110,16 +108,22 @@ async def create_app( ), ) if app_data.deployment_type == AppDeploymentType.MANUAL: - if not app_data.app_secret or not app_data.app_key or not app_data.app_url: + if not app_data.public_url: return JSONResponse( status_code=status.HTTP_400_BAD_REQUEST, content=response_formatter.buildErrorResponse( - 'App secret, app key and app URL are required' + 'Public URL is required for manual deployment' ), ) - app_url = app_data.app_url + public_url = app_data.public_url + # For manual deployment, private_url defaults to public_url if not provided + private_url = ( + app_data.private_url if app_data.private_url else app_data.public_url + ) else: - app_url = f'https://{app_data.app_name}.apps.rootflo.ai' + public_url = f'https://{app_data.app_name}.apps.rootflo.ai' + # For auto deployment, private_url defaults to floware internal URL if not provided + private_url = app_data.private_url if app_data.private_url else public_url data = { 'deployment': { @@ -148,10 +152,9 @@ async def create_app( app = await app_service.create_app( app_name=app_data.app_name, - app_url=app_url, + public_url=public_url, + private_url=private_url, status=app_status, - app_secret=app_data.app_secret, - app_key=app_data.app_key, deployment_type=app_data.deployment_type.value, type=app_data.type, config={}, @@ -362,33 +365,23 @@ async def get_app_status( url = f'https://{app.app_name}-floware.apps.rootflo.ai/floware' - response = requests.get(url + '/v1/health') - - if response.status_code != 200: - return JSONResponse( - status_code=status.HTTP_200_OK, - content=response_formatter.buildSuccessResponse({'status': app.status}), - ) - - hmac_response = requests.post( - url + '/v1/developer/secrets', headers={'X-Passthrough': 'secret'} - ) - res_json = hmac_response.json() + try: + response = requests.get(url + '/v1/health', timeout=10) - if hmac_response.status_code != 201: + if response.status_code == 200: + await app_service.update_app(app_id, status=AppStatus.SUCCESS) + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'status': 'success'}), + ) + else: + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'status': app.status}), + ) + except requests.exceptions.RequestException as e: + logger.warning(f'Health check failed for app {app.app_name}: {str(e)}') return JSONResponse( status_code=status.HTTP_200_OK, content=response_formatter.buildSuccessResponse({'status': app.status}), ) - - await app_service.update_app( - app_id, - status=AppStatus.SUCCESS, - app_key=res_json['data']['client_key'], - app_secret=res_json['data']['client_secret'], - ) - - return JSONResponse( - status_code=status.HTTP_200_OK, - content=response_formatter.buildSuccessResponse({'status': 'success'}), - ) diff --git a/wavefront/server/apps/floconsole/floconsole/controllers/user_controller.py b/wavefront/server/apps/floconsole/floconsole/controllers/user_controller.py index ac8ea137..6f83846b 
100644 --- a/wavefront/server/apps/floconsole/floconsole/controllers/user_controller.py +++ b/wavefront/server/apps/floconsole/floconsole/controllers/user_controller.py @@ -1,3 +1,6 @@ +from typing import Optional +from uuid import UUID + from common_module.common_container import CommonContainer from common_module.log.logger import logger from common_module.response_formatter import ResponseFormatter @@ -13,6 +16,7 @@ from floconsole.db.models.user import User from floconsole.db.repositories.sql_alchemy_repository import SQLAlchemyRepository from floconsole.di.application_container import ApplicationContainer +from floconsole.services.user_service import UserService from floconsole.utils.user_utils import get_current_user from floconsole.utils.password_utils import hash_password @@ -27,6 +31,13 @@ class CreateUserRequest(BaseModel): last_name: str +class UpdateUserRequest(BaseModel): + email: Optional[str] = None + password: Optional[str] = None + first_name: Optional[str] = None + last_name: Optional[str] = None + + @user_router.post('/users') @inject async def create_user( @@ -95,3 +106,187 @@ async def get_resources( status_code=status.HTTP_200_OK, content=response_formatter.buildSuccessResponse({'user': user.to_dict()}), ) + + +@user_router.get('/users') +@inject +async def list_users( + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + user_service: UserService = Depends(Provide[ApplicationContainer.user_service]), +): + users = await user_service.get_all_users() + users_data = [user.to_dict() for user in users] + + logger.info(f'Retrieved {len(users)} users successfully') + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse({'users': users_data}), + ) + + +@user_router.patch('/users/{user_id}') +@inject +async def update_user( + user_id: UUID, + user_data: UpdateUserRequest, + request: Request, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + user_service: UserService = Depends(Provide[ApplicationContainer.user_service]), + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[ApplicationContainer.user_repository] + ), + config: dict = Depends(Provide[ApplicationContainer.config]), +): + # Get current user + _, current_user_id, _ = get_current_user(request) + super_admin_emails = config['super_admin']['email'].split(',') + + current_user = await user_repository.find_one(id=current_user_id) + is_super_admin = current_user and current_user.email in super_admin_emails + + # Authorization: users can only edit themselves, super admins can edit anyone + if str(user_id) != str(current_user_id) and not is_super_admin: + logger.warning( + f'User {current_user_id} attempted to edit user {user_id} without permission' + ) + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'You are not authorized to edit this user' + ), + ) + + # Filter out None values + update_data = {k: v for k, v in user_data.model_dump().items() if v is not None} + + if not update_data: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse('No fields to update'), + ) + + # Check email uniqueness if email is being updated + if 'email' in update_data: + existing_user = await user_repository.find_one(email=update_data['email']) + if existing_user and str(existing_user.id) != str(user_id): + logger.warning( + f'Update failed - email 
already exists: {update_data["email"]}' + ) + return JSONResponse( + status_code=status.HTTP_409_CONFLICT, + content=response_formatter.buildErrorResponse('Email already exists'), + ) + + try: + updated_user = await user_service.update_user(user_id, **update_data) + + if not updated_user: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse('User not found'), + ) + + logger.info(f'User {updated_user.email} updated successfully') + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'user': updated_user.to_dict()} + ), + ) + except Exception as e: + logger.error(f'Failed to update user: {str(e)}') + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Failed to update user: {str(e)}' + ), + ) + + +@user_router.delete('/users/{user_id}') +@inject +async def delete_user( + user_id: UUID, + request: Request, + response_formatter: ResponseFormatter = Depends( + Provide[CommonContainer.response_formatter] + ), + user_service: UserService = Depends(Provide[ApplicationContainer.user_service]), + user_repository: SQLAlchemyRepository[User] = Depends( + Provide[ApplicationContainer.user_repository] + ), + config: dict = Depends(Provide[ApplicationContainer.config]), +): + # Get current user + _, current_user_id, _ = get_current_user(request) + super_admin_emails = config['super_admin']['email'].split(',') + + current_user = await user_repository.find_one(id=current_user_id) + is_super_admin = current_user and current_user.email in super_admin_emails + + # Get target user + target_user = await user_repository.find_one(id=user_id, deleted=False) + + if not target_user: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse('User not found'), + ) + + target_is_super_admin = target_user.email in super_admin_emails + + # Authorization: normal users cannot delete super admins + if target_is_super_admin and not is_super_admin: + logger.warning( + f'User {current_user_id} attempted to delete super admin {user_id}' + ) + return JSONResponse( + status_code=status.HTTP_403_FORBIDDEN, + content=response_formatter.buildErrorResponse( + 'You are not authorized to delete super admin users' + ), + ) + + # Check minimum super admin constraint + if target_is_super_admin: + super_admin_count = await user_service.count_super_admins(super_admin_emails) + if super_admin_count <= 1: + logger.warning('Cannot delete last super admin user') + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=response_formatter.buildErrorResponse( + 'Cannot delete the last super admin user' + ), + ) + + try: + deleted_user = await user_service.delete_user(user_id) + + if not deleted_user: + return JSONResponse( + status_code=status.HTTP_404_NOT_FOUND, + content=response_formatter.buildErrorResponse('User not found'), + ) + + logger.info(f'User {deleted_user.email} deleted successfully') + + return JSONResponse( + status_code=status.HTTP_200_OK, + content=response_formatter.buildSuccessResponse( + {'message': 'User deleted successfully'} + ), + ) + except Exception as e: + logger.error(f'Failed to delete user: {str(e)}') + return JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content=response_formatter.buildErrorResponse( + f'Failed to delete user: {str(e)}' + ), + ) diff --git 
a/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_12_09_1328-85a63aed0f81_remove_app_key_and_app_secret.py b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_12_09_1328-85a63aed0f81_remove_app_key_and_app_secret.py new file mode 100644 index 00000000..88373415 --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_12_09_1328-85a63aed0f81_remove_app_key_and_app_secret.py @@ -0,0 +1,49 @@ +"""remove_app_key_and_app_secret + +Revision ID: 85a63aed0f81 +Revises: 480783ba0ace +Create Date: 2025-12-09 13:28:06.158846 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '85a63aed0f81' +down_revision = '480783ba0ace' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + """Remove app_key and app_secret columns, rename app_url to public_url, and add private_url""" + # Step 1: Remove app_key and app_secret columns + op.drop_column('app', 'app_secret') + op.drop_column('app', 'app_key') + + # Step 2: Add private_url as nullable (temporary) + op.add_column('app', sa.Column('private_url', sa.String(), nullable=True)) + + # Step 3: Populate private_url with existing app_url values + op.execute('UPDATE app SET private_url = app_url') + + # Step 4: Rename app_url to public_url + op.alter_column('app', 'app_url', new_column_name='public_url') + + # Step 5: Make private_url non-nullable + op.alter_column('app', 'private_url', nullable=False) + + +def downgrade() -> None: + """Restore app_key and app_secret columns, rename public_url back to app_url, and remove private_url""" + # Step 1: Rename public_url back to app_url + op.alter_column('app', 'public_url', new_column_name='app_url') + + # Step 2: Drop private_url column + op.drop_column('app', 'private_url') + + # Step 3: Restore app_key and app_secret columns as nullable + op.add_column('app', sa.Column('app_key', sa.String(), nullable=True)) + op.add_column('app', sa.Column('app_secret', sa.String(), nullable=True)) diff --git a/wavefront/server/apps/floconsole/floconsole/db/models/app.py b/wavefront/server/apps/floconsole/floconsole/db/models/app.py index e0b69f75..3340da29 100644 --- a/wavefront/server/apps/floconsole/floconsole/db/models/app.py +++ b/wavefront/server/apps/floconsole/floconsole/db/models/app.py @@ -16,23 +16,24 @@ class App(Base): primary_key=True, default=uuid.uuid4, index=True ) app_name: Mapped[str] = mapped_column(nullable=False) - app_url: Mapped[str] = mapped_column(nullable=False) - app_secret: Mapped[str] = mapped_column(nullable=True) - app_key: Mapped[str] = mapped_column(nullable=True) + public_url: Mapped[str] = mapped_column(nullable=False) + private_url: Mapped[str] = mapped_column(nullable=False) deleted: Mapped[bool] = mapped_column(default=False) status: Mapped[str] = mapped_column(default='in_progress') config: Mapped[dict] = mapped_column(JSON, default={}) deployment_type: Mapped[str] = mapped_column(nullable=False, default='manual') type: Mapped[str] = mapped_column(nullable=False, default='custom') created_at: Mapped[datetime] = mapped_column(default=datetime.now) - updated_at: Mapped[Optional[datetime]] = mapped_column(nullable=True) + updated_at: Mapped[Optional[datetime]] = mapped_column( + nullable=True, default=datetime.now + ) def to_dict(self): return { 'id': str(self.id), 'app_name': self.app_name, - 'app_url': self.app_url, - 'app_key': self.app_key, + 'public_url': self.public_url, + 'private_url': self.private_url, 'status': self.status, 'config': 
self.config, 'deployment_type': self.deployment_type, diff --git a/wavefront/server/apps/floconsole/floconsole/di/application_container.py b/wavefront/server/apps/floconsole/floconsole/di/application_container.py index 1005a04f..e77f4b7b 100644 --- a/wavefront/server/apps/floconsole/floconsole/di/application_container.py +++ b/wavefront/server/apps/floconsole/floconsole/di/application_container.py @@ -13,6 +13,7 @@ from floconsole.services.token_service import TokenService from floconsole.services.floware_proxy_service import FlowareProxyService from floconsole.services.app_service import AppService +from floconsole.services.user_service import UserService class ApplicationContainer(containers.DeclarativeContainer): @@ -49,6 +50,11 @@ class ApplicationContainer(containers.DeclarativeContainer): # services app_service = providers.Singleton(AppService, app_repository=app_repository) + user_service = providers.Factory( + UserService, + user_repository=user_repository, + ) + kms_service = providers.Selector( config.jwt_token.enable_cloud_kms, true=providers.Singleton( diff --git a/wavefront/server/apps/floconsole/floconsole/services/app_service.py b/wavefront/server/apps/floconsole/floconsole/services/app_service.py index 96c98769..103990ec 100644 --- a/wavefront/server/apps/floconsole/floconsole/services/app_service.py +++ b/wavefront/server/apps/floconsole/floconsole/services/app_service.py @@ -29,10 +29,9 @@ async def get_app_by_name(self, app_name: str) -> Optional[App]: async def create_app( self, app_name: str, - app_url: Optional[str] = None, + public_url: Optional[str] = None, + private_url: Optional[str] = None, status: str = 'in_progress', - app_secret: Optional[str] = None, - app_key: Optional[str] = None, deployment_type: str = 'manual', type: str = 'custom', config: dict = {}, @@ -40,10 +39,9 @@ async def create_app( """Create a new app""" result = await self.app_repository.create( app_name=app_name, - app_url=app_url, + public_url=public_url, + private_url=private_url, status=status, - app_secret=app_secret, - app_key=app_key, deployment_type=deployment_type, type=type, config=config, diff --git a/wavefront/server/apps/floconsole/floconsole/services/floware_proxy_service.py b/wavefront/server/apps/floconsole/floconsole/services/floware_proxy_service.py index 5efa42a2..d0571b41 100644 --- a/wavefront/server/apps/floconsole/floconsole/services/floware_proxy_service.py +++ b/wavefront/server/apps/floconsole/floconsole/services/floware_proxy_service.py @@ -1,11 +1,9 @@ from dataclasses import dataclass # from floconsole.constants.app import AppDeploymentType -from floconsole.constants.auth import SERVICE_AUTH_ROLE_ID, RootfloHeaders +from floconsole.constants.auth import RootfloHeaders import httpx -import jwt import os -from datetime import datetime, timedelta from fastapi import Request from fastapi.responses import Response, StreamingResponse @@ -39,40 +37,16 @@ def __init__( self.temporary_token_expiry = int(temporary_token_expiry) self.passthrough_secret = os.getenv('PASSTHROUGH_SECRET') - async def _get_app_base_url(self, app_url: str, app_id: str) -> str: + async def _get_app_base_url(self, private_url: str, app_id: str) -> str: """Get app base URL - used for both floware URL and JWT audience""" - if app_url.startswith('http'): - return app_url.rstrip('/') + if private_url.startswith('http'): + return private_url.rstrip('/') elif self.is_dev and 'localhost' in app_id: return f'http://{app_id}' elif self.is_dev and 'host.docker.internal' in app_id: return f'http://{app_id}' 
else: - return app_url.rstrip('/') - - async def _generate_service_token( - self, session: UserSession, app, app_base_url: str - ) -> str: - """Generate T2 service token using app secret""" - now = datetime.now() - - # Create service token with console issuer and app-specific audience - payload = { - 'iss': self.service_issuer, - 'aud': app_base_url, - 'iat': int(now.timestamp()), - 'exp': int( - (now + timedelta(seconds=self.temporary_token_expiry)).timestamp() - ), # Short-lived - 'sub': session.user_id, - 'user_id': session.user_id, - 'role_id': SERVICE_AUTH_ROLE_ID, - 'service_auth': True, # Mark as service-to-service token - } - - # Sign with app-specific secret - service_token = jwt.encode(payload, app.app_secret, algorithm='HS256') - return f'{self.token_prefix}{service_token}' + return private_url.rstrip('/') async def proxy_request( self, method: str, app_id: str, path: str, request: Request @@ -101,18 +75,15 @@ async def proxy_request( raise ValueError(f'Invalid app_id format: {app_id}') # if app.deployment_type == AppDeploymentType.MANUAL.value: - # app_base_url = await self._get_app_base_url(app.app_url, app_id) + # app_base_url = await self._get_app_base_url(app.private_url, app_id) # else: # app_base_url = await self._get_app_base_url( # 'https://' + app.app_name + '-floware.apps.rootflo.ai', app_id # ) - app_base_url = await self._get_app_base_url(app.app_url, app_id) - - # Step 4: Generate T2 service token with app-specific secret - # service_token = await self._generate_service_token(session, app, app_base_url) + app_base_url = await self._get_app_base_url(app.private_url, app_id) - # Step 5: Prepare request to floware + # Step 3: Prepare request to floware floware_url = f'{app_base_url}/floware/{path}' # Copy headers from original request, excluding Authorization @@ -122,21 +93,18 @@ async def proxy_request( if key.lower() not in ['authorization', 'host', 'content-length'] } - # Add service authentication headers using app-specific credentials - # headers[RootfloHeaders.CLIENT_KEY] = app.app_key - # headers['Authorization'] = f'Bearer {service_token}' headers['Content-Type'] = request.headers.get( 'Content-Type', 'application/json' ) - # Add passthrough header for non-production environments + # Step 4: Add passthrough header for non-production environments if self.app_env != 'production' and self.passthrough_secret: headers[RootfloHeaders.PASSTHROUGH] = self.passthrough_secret # Copy query parameters query_params = dict(request.query_params) - # Detect if streaming (SSE) is needed + # Step 5: Detect if streaming (SSE) is needed is_streaming = 'text/event-stream' in request.headers.get('accept', '').lower() # Step 6: Make request to floware diff --git a/wavefront/server/apps/floconsole/floconsole/services/user_service.py b/wavefront/server/apps/floconsole/floconsole/services/user_service.py new file mode 100644 index 00000000..23873882 --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/services/user_service.py @@ -0,0 +1,40 @@ +from typing import List, Optional +from uuid import UUID + +from floconsole.db.models.user import User +from floconsole.db.repositories.sql_alchemy_repository import SQLAlchemyRepository +from floconsole.utils.password_utils import hash_password + + +class UserService: + def __init__(self, user_repository: SQLAlchemyRepository[User]): + self.user_repository = user_repository + + async def get_all_users(self) -> List[User]: + """Get all non-deleted users""" + return await self.user_repository.find(deleted=False) + + async def 
update_user(self, user_id: UUID, **update_data) -> Optional[User]: + """Update user by ID""" + # Hash password if it's being updated + if 'password' in update_data: + update_data['password'] = hash_password(update_data['password']) + + if update_data: + result = await self.user_repository.find_one_and_update( + filters={'id': user_id, 'deleted': False}, refresh=True, **update_data + ) + return result + return None + + async def delete_user(self, user_id: UUID) -> Optional[User]: + """Soft delete user by ID""" + result = await self.user_repository.find_one_and_update( + filters={'id': user_id, 'deleted': False}, deleted=True + ) + return result + + async def count_super_admins(self, super_admin_emails: List[str]) -> int: + """Count users with super admin emails""" + users = await self.user_repository.find(email=super_admin_emails, deleted=False) + return len(users) diff --git a/wavefront/server/apps/floware/floware/server.py b/wavefront/server/apps/floware/floware/server.py index dee93e56..22ff515a 100644 --- a/wavefront/server/apps/floware/floware/server.py +++ b/wavefront/server/apps/floware/floware/server.py @@ -392,8 +392,7 @@ async def global_exception_handler(request: Request, exc: Exception): prometheus_middleware.http_errors_total.labels(**labels, status_code=500).inc() error_message = 'An unexpected error has occurred while performing this action, please try again' - if environment != 'production': - error_message += f' - {str(exc)}' + error_message += f' - {str(exc)}' request_id = getattr(request.state, 'request_id', get_current_request_id()) logger.error(f'Error in API call [Request ID: {request_id}]: {exc}', exc_info=True) diff --git a/wavefront/server/uv.lock b/wavefront/server/uv.lock index 779fad4b..b5d737f3 100644 --- a/wavefront/server/uv.lock +++ b/wavefront/server/uv.lock @@ -633,7 +633,7 @@ requires-dist = [ { name = "dependency-injector", specifier = ">=4.46.0,<5.0.0" }, { name = "fastapi", specifier = ">=0.115.2,<1.0.0" }, { name = "httpx", specifier = ">=0.27.0" }, - { name = "pipecat-ai", extras = ["websocket", "cartesia", "google", "silero", "deepgram", "groq", "runner"], specifier = "==0.0.91" }, + { name = "pipecat-ai", extras = ["websocket", "cartesia", "google", "silero", "deepgram", "groq", "runner"], specifier = "==0.0.97" }, { name = "pydantic", specifier = ">=2.0.0" }, { name = "python-dotenv", specifier = ">=1.1.0,<2.0.0" }, { name = "python-multipart", specifier = ">=0.0.9" }, @@ -3928,7 +3928,7 @@ wheels = [ [[package]] name = "pipecat-ai" -version = "0.0.91" +version = "0.0.97" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiofiles" }, @@ -3949,9 +3949,9 @@ dependencies = [ { name = "soxr" }, { name = "wait-for2", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/4c/9d3fabce446fa208a10009c6306cca4e032f591b22fb30a4db6c7fec6515/pipecat_ai-0.0.91.tar.gz", hash = "sha256:a21ce6fd062ffa4d4944e1a479418e3e1626429e346f077ff185da134c4e9bf3", size = 10700370, upload-time = "2025-10-22T02:11:52.816Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/23/affeb18644f9aa3d4a5cb36a99160290a27a2a321fc34f53ecef0ebf058a/pipecat_ai-0.0.97.tar.gz", hash = "sha256:71ce27d1b5c9353958e3f4ac0c8ca18b8c00840d0be3f33350bd138d7329c24d", size = 10768149, upload-time = "2025-12-05T23:53:11.077Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/00/4f/99336f563164b734b6849eefa6cf5bc26923b8b4ec6de0d85497a07008f6/pipecat_ai-0.0.91-py3-none-any.whl", hash = 
"sha256:2f70f22a4625d855ee5742cc7d654ea3ec7837fae9de3bd5cdea77a550080d14", size = 10381447, upload-time = "2025-10-22T02:11:50.436Z" }, + { url = "https://files.pythonhosted.org/packages/a0/9a/e71f7ec77153fafa509928414086310984f2cce39cad17b915974580208b/pipecat_ai-0.0.97-py3-none-any.whl", hash = "sha256:dc637121ed4aece2053194bf0e94be0b984f2bd887f83118092c1fbd223a9374", size = 10448878, upload-time = "2025-12-05T23:53:08.447Z" }, ] [package.optional-dependencies] @@ -3961,6 +3961,7 @@ cartesia = [ ] deepgram = [ { name = "deepgram-sdk" }, + { name = "websockets" }, ] google = [ { name = "google-cloud-speech" }, From c6d626b0aba6651d86ff6e037c5ab2fca3d9fb9b Mon Sep 17 00:00:00 2001 From: vishnu r kumar Date: Thu, 11 Dec 2025 18:27:26 +0530 Subject: [PATCH 11/13] chore: remove build trigger url --- wavefront/server/apps/floconsole/floconsole/config.ini | 3 +++ .../apps/floconsole/floconsole/controllers/app_controller.py | 5 +++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/wavefront/server/apps/floconsole/floconsole/config.ini b/wavefront/server/apps/floconsole/floconsole/config.ini index bed37fee..d960b757 100644 --- a/wavefront/server/apps/floconsole/floconsole/config.ini +++ b/wavefront/server/apps/floconsole/floconsole/config.ini @@ -20,3 +20,6 @@ audience=${CONSOLE_JWT_AUDIENCE:https://console.rootflo.ai} [super_admin] email = ${SUPER_ADMIN_EMAIL:vishnu@rootflo.ai} + +[deployment] +build_trigger_url=${BUILD_TRIGGER_URL} diff --git a/wavefront/server/apps/floconsole/floconsole/controllers/app_controller.py b/wavefront/server/apps/floconsole/floconsole/controllers/app_controller.py index d2c95a15..050c4699 100644 --- a/wavefront/server/apps/floconsole/floconsole/controllers/app_controller.py +++ b/wavefront/server/apps/floconsole/floconsole/controllers/app_controller.py @@ -22,8 +22,6 @@ from floconsole.db.models.user import User from floconsole.constants.app import AppDeploymentType, AppStatus -build_trigger_url = 'https://cloudbuild.googleapis.com/v1/projects/aesy-330511/locations/asia-south1/triggers/new-app:webhook?key=AIzaSyA_cDcmEHojgD7SG2OI2_6DYSBMeLY8kWk&trigger=new-app&projectId=aesy-330511&secret=Buildtriggersecret' - app_router = APIRouter(prefix='/v1') @@ -97,6 +95,7 @@ async def create_app( Provide[CommonContainer.response_formatter] ), app_service: AppService = Depends(Provide[ApplicationContainer.app_service]), + config: dict = Depends(Provide[ApplicationContainer.config]), ): try: app = await app_service.get_app_by_name(app_data.app_name) @@ -134,6 +133,7 @@ async def create_app( }, } + build_trigger_url = config['deployment']['build_trigger_url'] response = requests.post(build_trigger_url, json=data) if response.status_code != 200: @@ -308,6 +308,7 @@ async def delete_app( }, } + build_trigger_url = config['deployment']['build_trigger_url'] response = requests.post(build_trigger_url, json=data) if response.status_code != 200: From 1d6c03bf18c3616d79b98c4f34f259eafd849077 Mon Sep 17 00:00:00 2001 From: vishnu r kumar Date: Fri, 12 Dec 2025 11:53:31 +0530 Subject: [PATCH 12/13] chore(client): use public url in app card --- wavefront/client/src/components/AppCard.tsx | 2 +- wavefront/client/src/pages/apps/create.tsx | 2 +- wavefront/client/src/pages/apps/index.tsx | 18 +++++++++--------- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/wavefront/client/src/components/AppCard.tsx b/wavefront/client/src/components/AppCard.tsx index a1b6016d..f9b2ca1b 100644 --- a/wavefront/client/src/components/AppCard.tsx +++ 
b/wavefront/client/src/components/AppCard.tsx @@ -50,7 +50,7 @@ const AppCard: React.FC = ({ app, onClick, onDeleteClick }) => {

{app.app_name}

- {app.app_url} + {app.public_url}
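With this change the card surfaces the externally reachable address, while the floconsole proxy from the earlier commit keeps using the internal one. A minimal sketch of which consumer reads which field, assuming the field names from App.to_dict and purely illustrative URL values:

# Hypothetical app record after the app_url -> public_url/private_url split.
app = {
    'app_name': 'demo-app',
    'public_url': 'https://demo-app.example.com',    # rendered on the AppCard
    'private_url': 'http://demo-app.internal:8080',  # targeted by FlowareProxyService
}

# Mirrors _get_app_base_url: the proxy builds its floware URL from private_url.
base_url = app['private_url'].rstrip('/')
floware_url = f'{base_url}/floware/v1/example'  # 'v1/example' is a placeholder path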

diff --git a/wavefront/client/src/pages/apps/create.tsx b/wavefront/client/src/pages/apps/create.tsx index 036b7623..45e70d37 100644 --- a/wavefront/client/src/pages/apps/create.tsx +++ b/wavefront/client/src/pages/apps/create.tsx @@ -181,7 +181,7 @@ const CreateApp: React.FC = () => { }} /> - Add local app + Create local app for development )} diff --git a/wavefront/client/src/pages/apps/index.tsx b/wavefront/client/src/pages/apps/index.tsx index bc11ed2a..4f69dbe3 100644 --- a/wavefront/client/src/pages/apps/index.tsx +++ b/wavefront/client/src/pages/apps/index.tsx @@ -70,6 +70,15 @@ const Dashboard: React.FC = () => { Your applications

+
+ +

+ Create new application +

+
{appsLoading ? (
@@ -84,15 +93,6 @@ const Dashboard: React.FC = () => { /> )) )} -
- -

- Create new application -

-
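The relabelled 'Create local app for development' option in create.tsx pairs with the dev-only branches of _get_app_base_url shown earlier: when is_dev is set and the app id itself encodes a local address, the proxy targets it directly. A hedged illustration with a hypothetical app id:

# Hypothetical local app id; mirrors the is_dev branches of _get_app_base_url.
app_id = 'localhost:9000'
base_url = f'http://{app_id}'  # -> 'http://localhost:9000'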
From 7013ec5d92fcd4d1d38fc9ebc76c6ecb9f8c68a2 Mon Sep 17 00:00:00 2001 From: vishnu r kumar Date: Fri, 12 Dec 2025 13:03:27 +0530 Subject: [PATCH 13/13] chore: remove deprecated image search module --- .../client/src/api/knowledge-base-service.ts | 24 +- .../client/src/components/ProtectedLayout.tsx | 1 + .../server/apps/floware/floware/server.py | 15 - wavefront/server/apps/floware/pyproject.toml | 2 - .../controllers/inference_controller.py | 10 - .../image_search_module/algorithms/base.py | 111 ---- .../algorithms/sift_matcher.py | 278 -------- .../controllers/image_search_controller.py | 159 ----- .../image_search_container.py | 96 --- .../image_search_module/models/ikb_models.py | 126 ---- .../models/search_request.py | 83 --- .../repositories/ikb_repository.py | 76 --- .../repositories/sift_features_repository.py | 52 -- .../services/algorithm_factory.py | 36 - .../services/algorithm_service.py | 37 -- .../services/ikb_service.py | 191 ------ .../services/image_matching_service.py | 104 --- .../services/reference_image_service.py | 244 ------- .../image_search_module/pyproject.toml | 46 -- .../image_search_module/tests/conftest.py | 35 - .../image_search_module/tests/db_setup.py | 123 ---- .../tests/test_crud_endpoints.py | 627 ------------------ .../tests/test_ikb_create_upload.py | 334 ---------- .../tests/test_image_controller.py | 203 ------ .../tests/test_images/local_search.sh | 150 ----- .../tests/test_images/staging_search.sh | 208 ------ .../controllers/inference_controller.py | 10 - .../knowledge_base_document_controller.py | 10 - wavefront/server/uv.lock | 106 --- 29 files changed, 11 insertions(+), 3486 deletions(-) delete mode 100644 wavefront/server/modules/image_search_module/image_search_module/algorithms/base.py delete mode 100644 wavefront/server/modules/image_search_module/image_search_module/algorithms/sift_matcher.py delete mode 100644 wavefront/server/modules/image_search_module/image_search_module/controllers/image_search_controller.py delete mode 100644 wavefront/server/modules/image_search_module/image_search_module/image_search_container.py delete mode 100644 wavefront/server/modules/image_search_module/image_search_module/models/ikb_models.py delete mode 100644 wavefront/server/modules/image_search_module/image_search_module/models/search_request.py delete mode 100644 wavefront/server/modules/image_search_module/image_search_module/repositories/ikb_repository.py delete mode 100644 wavefront/server/modules/image_search_module/image_search_module/repositories/sift_features_repository.py delete mode 100644 wavefront/server/modules/image_search_module/image_search_module/services/algorithm_factory.py delete mode 100644 wavefront/server/modules/image_search_module/image_search_module/services/algorithm_service.py delete mode 100644 wavefront/server/modules/image_search_module/image_search_module/services/ikb_service.py delete mode 100644 wavefront/server/modules/image_search_module/image_search_module/services/image_matching_service.py delete mode 100644 wavefront/server/modules/image_search_module/image_search_module/services/reference_image_service.py delete mode 100644 wavefront/server/modules/image_search_module/pyproject.toml delete mode 100644 wavefront/server/modules/image_search_module/tests/conftest.py delete mode 100644 wavefront/server/modules/image_search_module/tests/db_setup.py delete mode 100644 wavefront/server/modules/image_search_module/tests/test_crud_endpoints.py delete mode 100644
wavefront/server/modules/image_search_module/tests/test_ikb_create_upload.py delete mode 100644 wavefront/server/modules/image_search_module/tests/test_image_controller.py delete mode 100755 wavefront/server/modules/image_search_module/tests/test_images/local_search.sh delete mode 100755 wavefront/server/modules/image_search_module/tests/test_images/staging_search.sh diff --git a/wavefront/client/src/api/knowledge-base-service.ts b/wavefront/client/src/api/knowledge-base-service.ts index ffa7cb17..b9177df5 100644 --- a/wavefront/client/src/api/knowledge-base-service.ts +++ b/wavefront/client/src/api/knowledge-base-service.ts @@ -159,27 +159,23 @@ export class KnowledgeBaseService { keywordWeight?: number, imageData?: string ): Promise { - let url = `/v1/:appId/floware/v1/knowledge-base/${kbId}/augment/${inferenceId}?query=${query}`; + const params: Record = { query }; - if (threshold) { - url += `&threshold=${threshold}`; - } - if (topK) { - url += `&top_k=${topK}`; - } - if (vectorWeight) { - url += `&vector_weight=${vectorWeight}`; - } - if (keywordWeight) { - url += `&keyword_weight=${keywordWeight}`; - } + if (threshold) params.threshold = threshold; + if (topK) params.top_k = topK; + if (vectorWeight) params.vector_weight = vectorWeight; + if (keywordWeight) params.keyword_weight = keywordWeight; const data: { image_data?: string } = {}; if (imageData) { data.image_data = imageData; } - const response: RagInferenceResponse = await this.http.post(url, data); + const response: RagInferenceResponse = await this.http.post( + `/v1/:appId/floware/v1/knowledge-base/${kbId}/augment/${inferenceId}`, + data, + { params } + ); return response; } diff --git a/wavefront/client/src/components/ProtectedLayout.tsx b/wavefront/client/src/components/ProtectedLayout.tsx index 46c21a1f..21ff9aa8 100644 --- a/wavefront/client/src/components/ProtectedLayout.tsx +++ b/wavefront/client/src/components/ProtectedLayout.tsx @@ -16,6 +16,7 @@ const ProtectedLayout = ({ if (!token) { setAuthenticatedState(false); navigate("/login"); + return; } else { setAuthenticatedState(true); } diff --git a/wavefront/server/apps/floware/floware/server.py b/wavefront/server/apps/floware/floware/server.py index 22ff515a..5ff8a377 100644 --- a/wavefront/server/apps/floware/floware/server.py +++ b/wavefront/server/apps/floware/floware/server.py @@ -80,8 +80,6 @@ from llm_inference_config_module.controllers.inference_proxy_controller import ( inference_proxy_router, ) -from image_search_module.controllers.image_search_controller import image_search_router -from image_search_module.image_search_container import ImageSearchContainer from tools_module.controllers.tools_controller import tools_router from tools_module.tools_container import ToolsContainer from voice_agents_module.voice_agents_container import VoiceAgentsContainer @@ -173,11 +171,6 @@ cache_manager=db_repo_container.cache_manager, ) -image_search_container = ImageSearchContainer( - db_client=db_repo_container.db_client, - cloud_storage_manager=common_container.cloud_storage_manager, -) - # API Services Container api_services_container: ApiServicesContainer = create_api_services_container( api_service_repository=db_repo_container.api_services_repository, @@ -371,7 +364,6 @@ async def metrics(request: Request): app.include_router(llm_inference_config_router, prefix='/floware') app.include_router(inference_proxy_router, prefix='/floware') -app.include_router(image_search_router, prefix='/floware') app.include_router(tools_router, prefix='/floware') 
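# Note: the image_search_router registration was dropped from this block along
# with the module; every remaining router keeps the same '/floware' prefix.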
app.include_router(telephony_config_router, prefix='/floware') app.include_router(tts_config_router, prefix='/floware') @@ -411,7 +403,6 @@ async def global_exception_handler(request: Request, exc: Exception): modules=[__name__], packages=[ 'product_analysis_module.product_analysis_service', - 'image_search_module.services', ], ) @@ -474,7 +465,6 @@ async def global_exception_handler(request: Request, exc: Exception): 'inference_module.controllers', 'llm_inference_config_module.controllers', 'tools_module.controllers', - 'image_search_module.controllers', 'voice_agents_module.controllers', ], ) @@ -528,11 +518,6 @@ async def global_exception_handler(request: Request, exc: Exception): ], ) -image_search_container.wire( - modules=[__name__], - packages=['image_search_module.controllers'], -) - api_services_container.wire( modules=[__name__], packages=['api_services_module.core'], diff --git a/wavefront/server/apps/floware/pyproject.toml b/wavefront/server/apps/floware/pyproject.toml index 834c5c2d..9f00fa27 100644 --- a/wavefront/server/apps/floware/pyproject.toml +++ b/wavefront/server/apps/floware/pyproject.toml @@ -21,7 +21,6 @@ dependencies = [ "inference-module", "llm-inference-config-module", "tools-module", - "image-search-module", "api-services-module", "voice-agents-module", @@ -48,7 +47,6 @@ agents-module = {workspace = true} inference-module = {workspace = true} llm-inference-config-module = {workspace = true} tools-module = {workspace = true} -image-search-module = {workspace = true} api-services-module = {workspace = true} voice-agents-module = {workspace = true} diff --git a/wavefront/server/apps/inference_app/inference_app/controllers/inference_controller.py b/wavefront/server/apps/inference_app/inference_app/controllers/inference_controller.py index 067b0350..e9a5e82a 100644 --- a/wavefront/server/apps/inference_app/inference_app/controllers/inference_controller.py +++ b/wavefront/server/apps/inference_app/inference_app/controllers/inference_controller.py @@ -44,16 +44,6 @@ class ImagePayload(BaseModel): inference_app_router = APIRouter() -async def handle_database_error(session, error_msg: str, error) -> JSONResponse: - """Handle database errors and return appropriate response.""" - await session.rollback() - logger.error(f'{error_msg} with error as {str(error)}') - return JSONResponse( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - content=ResponseFormatter.buildErrorResponse(str(error)), - ) - - @inference_app_router.post('/v1/model-repository/model/{model_id}/infer') @inject async def generic_inference_handler( diff --git a/wavefront/server/modules/image_search_module/image_search_module/algorithms/base.py b/wavefront/server/modules/image_search_module/image_search_module/algorithms/base.py deleted file mode 100644 index ab92a93f..00000000 --- a/wavefront/server/modules/image_search_module/image_search_module/algorithms/base.py +++ /dev/null @@ -1,111 +0,0 @@ -from abc import ABC, abstractmethod -from typing import Dict, List, Any -from dataclasses import dataclass -from enum import Enum - - -class AlgorithmType(Enum): - """Supported matching algorithms""" - - SIFT = 'sift' - # SAM_DINOV2 = "sam_dinov2" - - -@dataclass -class MatchResult: - """Standardized match result across all algorithms""" - - algorithm_type: str - reference_id: str - match_score: float - is_match: bool - confidence: float - processing_time_ms: float - metadata: Dict[str, Any] - - def to_dict(self) -> Dict[str, Any]: - return { - 'algorithm_type': self.algorithm_type, - 'reference_id': 
self.reference_id, - 'match_score': self.match_score, - 'is_match': self.is_match, - 'confidence': self.confidence, - 'processing_time_ms': self.processing_time_ms, - 'metadata': self.metadata, - } - - -@dataclass -class AlgorithmInfo: - """Algorithm metadata and capabilities""" - - name: str - version: str - description: str - supported_formats: List[str] - performance_characteristics: Dict[str, Any] - requirements: Dict[str, Any] - - -class ImageMatchingAlgorithm(ABC): - """Abstract base class for all image matching algorithms""" - - def __init__(self, config: Dict[str, Any]): - self.config = config - self.algorithm_type = self.__class__.__name__.lower().replace('matcher', '') - - @abstractmethod - def extract_features(self, image_bytes: bytes) -> Any: - """ - Extract features from image bytes - - Args: - image_bytes: Raw image data - - Returns: - Algorithm-specific feature representation - """ - pass - - @abstractmethod - def match_against_reference( - self, query_features: Any, reference_features: Any, reference_id: str - ) -> MatchResult: - """ - Match query features against single reference - - Args: - query_features: Features extracted from query image - reference_features: Features from reference image - reference_id: Unique identifier for reference - - Returns: - MatchResult with similarity score and metadata - """ - pass - - @abstractmethod - def batch_match( - self, query_features: Any, reference_features_map: Dict[str, Any] - ) -> List[MatchResult]: - """ - Efficiently match query against multiple references - - Args: - query_features: Features from query image - reference_features_map: Dict of {reference_id: features} - - Returns: - List of MatchResult objects - """ - pass - - @abstractmethod - def get_algorithm_info(self) -> AlgorithmInfo: - """Return algorithm metadata and capabilities""" - pass - - def preprocess_image(self, image_bytes: bytes, target_width: int = 800) -> Any: - """Common image preprocessing logic""" - # This would contain shared preprocessing logic - pass diff --git a/wavefront/server/modules/image_search_module/image_search_module/algorithms/sift_matcher.py b/wavefront/server/modules/image_search_module/image_search_module/algorithms/sift_matcher.py deleted file mode 100644 index fea66a7c..00000000 --- a/wavefront/server/modules/image_search_module/image_search_module/algorithms/sift_matcher.py +++ /dev/null @@ -1,278 +0,0 @@ -import cv2 -import numpy as np -import time -from typing import Dict, List, Any -from dataclasses import dataclass - -from image_search_module.algorithms.base import ( - ImageMatchingAlgorithm, - MatchResult, - AlgorithmInfo, -) - - -@dataclass -class SIFTFeatures: - """SIFT-specific feature representation""" - - keypoints: List[cv2.KeyPoint] - descriptors: np.ndarray - image_shape: tuple - - def to_dict(self) -> Dict[str, Any]: - """Serialize for storage""" - return { - 'keypoints': [ - { - 'pt': kp.pt, - 'size': kp.size, - 'angle': kp.angle, - 'response': kp.response, - 'octave': kp.octave, - 'class_id': kp.class_id, - } - for kp in self.keypoints - ], - 'descriptors': self.descriptors.tolist() - if self.descriptors is not None - else None, - 'image_shape': self.image_shape, - } - - @classmethod - def from_dict(cls, data: Dict[str, Any]) -> 'SIFTFeatures': - """Deserialize from storage""" - keypoints = [] - for kp_data in data['keypoints']: - kp = cv2.KeyPoint( - x=kp_data['pt'][0], - y=kp_data['pt'][1], - size=kp_data['size'], - angle=kp_data['angle'], - response=kp_data['response'], - octave=kp_data['octave'], - 
class_id=kp_data['class_id'], - ) - keypoints.append(kp) - - descriptors = ( - np.array(data['descriptors'], dtype=np.float32) - if data['descriptors'] - else None - ) - return cls( - keypoints=keypoints, - descriptors=descriptors, - image_shape=data['image_shape'], - ) - - -class SIFTMatcher(ImageMatchingAlgorithm): - """SIFT-based image matching implementation""" - - def __init__(self, config: Dict[str, Any]): - super().__init__(config) - self.max_features = config.get('max_features', 5000) - self.lowe_ratio = config.get('lowe_ratio', 0.75) - self.match_threshold = config.get('match_threshold', 10) - self.min_homography_matches = config.get('min_homography_matches', 4) - self.target_width = config.get('target_width', 800) - - self.sift = cv2.SIFT_create(nfeatures=self.max_features) - - def extract_features(self, image_bytes: bytes) -> SIFTFeatures: - """Extract SIFT features from image""" - try: - # Convert bytes to image - nparr = np.frombuffer(image_bytes, np.uint8) - image = cv2.imdecode(nparr, cv2.IMREAD_GRAYSCALE) - - if image is None: - raise ValueError('Could not decode image') - - # Preprocess image - processed_image = self._preprocess_image(image) - - # Extract SIFT features - keypoints, descriptors = self.sift.detectAndCompute(processed_image, None) - - return SIFTFeatures( - keypoints=keypoints, - descriptors=descriptors, - image_shape=processed_image.shape, - ) - - except Exception as e: - raise RuntimeError(f'SIFT feature extraction failed: {e}') - - def match_against_reference( - self, - query_features: SIFTFeatures, - reference_features: SIFTFeatures, - reference_id: str, - ) -> MatchResult: - """Match SIFT features against single reference""" - - start_time = time.perf_counter() - - try: - # Perform feature matching - matches = self._match_features( - query_features.descriptors, reference_features.descriptors - ) - - # Verify with homography if enough matches - inlier_matches, homography, is_valid = self._verify_homography( - query_features.keypoints, reference_features.keypoints, matches - ) - - match_score = len(inlier_matches) - is_match = match_score >= self.match_threshold and is_valid - confidence = min(match_score / (self.match_threshold * 2), 1.0) - - end_time = time.perf_counter() - processing_time_ms = (end_time - start_time) * 1000 - - return MatchResult( - algorithm_type=self.algorithm_type, - reference_id=reference_id, - match_score=match_score, - is_match=is_match, - confidence=confidence, - processing_time_ms=processing_time_ms, - metadata={ - 'total_matches': len(matches), - 'inlier_matches': len(inlier_matches), - 'homography_valid': is_valid, - 'lowe_ratio': self.lowe_ratio, - }, - ) - - except Exception as e: - return MatchResult( - algorithm_type=self.algorithm_type, - reference_id=reference_id, - match_score=0.0, - is_match=False, - confidence=0.0, - processing_time_ms=0.0, - metadata={'error': str(e)}, - ) - - def batch_match( - self, - query_features: SIFTFeatures, - reference_features_map: Dict[str, SIFTFeatures], - ) -> List[MatchResult]: - """Batch match against multiple references""" - - results = [] - for ref_id, ref_features in reference_features_map.items(): - result = self.match_against_reference(query_features, ref_features, ref_id) - results.append(result) - - return results - - def get_algorithm_info(self) -> AlgorithmInfo: - """Return SIFT algorithm information""" - return AlgorithmInfo( - name='SIFT', - version='1.0.0', - description='Scale-Invariant Feature Transform for feature-based matching', - supported_formats=['jpg', 'jpeg', 
'png', 'bmp', 'tiff'], - performance_characteristics={ - 'rotation_invariant': True, - 'scale_invariant': True, - 'illumination_robust': True, - 'typical_processing_time_ms': '100-500', - 'memory_usage': 'moderate', - }, - requirements={ - 'opencv': '>=4.8.0', - 'min_image_size': '100x100', - 'recommended_image_size': '800x600', - }, - ) - - def _preprocess_image(self, image: np.ndarray) -> np.ndarray: - """Preprocess image for SIFT""" - # Resize if too large - if image.shape[1] > self.target_width: - scale = self.target_width / image.shape[1] - new_height = int(image.shape[0] * scale) - image = cv2.resize( - image, (self.target_width, new_height), interpolation=cv2.INTER_AREA - ) - - # Apply CLAHE for contrast enhancement - clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8)) - image = clahe.apply(image) - - # Apply slight Gaussian blur - image = cv2.GaussianBlur(image, (3, 3), 0.5) - - return image - - def _match_features(self, desc1: np.ndarray, desc2: np.ndarray) -> List[cv2.DMatch]: - """Match SIFT descriptors using Lowe's ratio test""" - if desc1 is None or desc2 is None or len(desc1) < 2 or len(desc2) < 2: - return [] - - try: - bf = cv2.BFMatcher() - matches = bf.knnMatch(desc1, desc2, k=2) - - # Apply Lowe's ratio test - good_matches = [] - for match_pair in matches: - if len(match_pair) == 2: - m, n = match_pair - if m.distance < self.lowe_ratio * n.distance: - good_matches.append(m) - - return good_matches - - except Exception: - return [] - - def _verify_homography( - self, - kp1: List[cv2.KeyPoint], - kp2: List[cv2.KeyPoint], - matches: List[cv2.DMatch], - ) -> tuple: - """Verify matches using homography estimation""" - if len(matches) < self.min_homography_matches: - return matches, None, False - - try: - # Extract matched points - src_pts = np.float32([kp1[m.queryIdx].pt for m in matches]).reshape( - -1, 1, 2 - ) - dst_pts = np.float32([kp2[m.trainIdx].pt for m in matches]).reshape( - -1, 1, 2 - ) - - # Find homography - homography, mask = cv2.findHomography( - src_pts, dst_pts, cv2.RANSAC, 5.0, maxIters=5000, confidence=0.995 - ) - - if homography is not None: - # Filter inlier matches - inlier_matches = [matches[i] for i in range(len(matches)) if mask[i]] - - # Check homography quality - det = np.linalg.det(homography[:2, :2]) - is_valid = ( - 0.1 < abs(det) < 10 - and len(inlier_matches) >= self.min_homography_matches - ) - - return inlier_matches, homography, is_valid - - return matches, None, False - - except Exception: - return matches, None, False diff --git a/wavefront/server/modules/image_search_module/image_search_module/controllers/image_search_controller.py b/wavefront/server/modules/image_search_module/image_search_module/controllers/image_search_controller.py deleted file mode 100644 index da9d35a7..00000000 --- a/wavefront/server/modules/image_search_module/image_search_module/controllers/image_search_controller.py +++ /dev/null @@ -1,159 +0,0 @@ -from typing import List, Optional -from fastapi import APIRouter, Depends, Query -from fastapi.responses import JSONResponse -from fastapi import status - -from common_module.response_formatter import ResponseFormatter - -from common_module.common_container import CommonContainer -from dependency_injector.wiring import inject, Provide - -from image_search_module.image_search_container import ImageSearchContainer -from image_search_module.services.ikb_service import IKBService -from image_search_module.models.ikb_models import ( - CreateIKBRequest, - IKBInfo, - IKBType, - IKBImageAddRequest, - IKBSearchRequest, 
- IKBSearchResponse, -) - -image_search_router = APIRouter(prefix='/ikb') - - -# IKB Management Endpoints -@image_search_router.post('/create', response_model=IKBInfo) -@inject -async def create_ikb( - payload: CreateIKBRequest, - ikb_service: IKBService = Depends(Provide[ImageSearchContainer.ikb_service]), - response_formatter: ResponseFormatter = Depends( - Provide[CommonContainer.response_formatter] - ), -): - """Create a new Image Knowledge Base""" - - ikb_info = await ikb_service.create_ikb(payload) - - return JSONResponse( - status_code=status.HTTP_201_CREATED, - content=response_formatter.buildSuccessResponse( - ikb_info.model_dump(mode='json') - ), - ) - - -@image_search_router.get('/', response_model=List[IKBInfo]) -@inject -async def list_ikbs( - ikb_type: Optional[IKBType] = Query(None, description='Filter by IKB type'), - ikb_service: IKBService = Depends(Provide[ImageSearchContainer.ikb_service]), - response_formatter: ResponseFormatter = Depends( - Provide[CommonContainer.response_formatter] - ), -): - """List all Image Knowledge Bases""" - - ikbs = await ikb_service.list_ikbs(ikb_type=ikb_type) - - return JSONResponse( - status_code=status.HTTP_200_OK, - content=response_formatter.buildSuccessResponse( - {'ikbs': [ikb.model_dump(mode='json') for ikb in ikbs]} - ), - ) - - -@image_search_router.get('/{ikb_id}', response_model=IKBInfo) -@inject -async def get_ikb( - ikb_id: str, - ikb_service: IKBService = Depends(Provide[ImageSearchContainer.ikb_service]), - response_formatter: ResponseFormatter = Depends( - Provide[CommonContainer.response_formatter] - ), -): - """Get information about a specific IKB""" - ikb = await ikb_service.get_ikb(ikb_id) - - if not ikb: - return JSONResponse( - status_code=status.HTTP_404_NOT_FOUND, - content=response_formatter.buildErrorResponse( - f'IKB with ID {ikb_id} not found' - ), - ) - - return JSONResponse( - status_code=status.HTTP_200_OK, - content=response_formatter.buildSuccessResponse(ikb.model_dump(mode='json')), - ) - - -# Image add and Search Endpoints -@image_search_router.post('/{ikb_id}/add') -@inject -async def add_image_to_ikb( - ikb_id: str, - payload: IKBImageAddRequest, - ikb_service: IKBService = Depends(Provide[ImageSearchContainer.ikb_service]), - response_formatter: ResponseFormatter = Depends( - Provide[CommonContainer.response_formatter] - ), -): - """add an image to a specific IKB""" - result = await ikb_service.add_image_to_ikb(ikb_id, payload) - - return JSONResponse( - status_code=status.HTTP_201_CREATED, - content=response_formatter.buildSuccessResponse(result), - ) - - -@image_search_router.post('/{ikb_id}/search', response_model=IKBSearchResponse) -@inject -async def search_in_ikb( - ikb_id: str, - payload: IKBSearchRequest, - ikb_service: IKBService = Depends(Provide[ImageSearchContainer.ikb_service]), - response_formatter: ResponseFormatter = Depends( - Provide[CommonContainer.response_formatter] - ), -): - """Search for similar images within a specific IKB""" - result = await ikb_service.search_in_ikb(ikb_id, payload) - - return JSONResponse( - status_code=status.HTTP_200_OK, - content=response_formatter.buildSuccessResponse(result.dict()), - ) - - -@image_search_router.delete('/{ikb_id}') -@inject -async def delete_ikb( - ikb_id: str, - ikb_service: IKBService = Depends(Provide[ImageSearchContainer.ikb_service]), - response_formatter: ResponseFormatter = Depends( - Provide[CommonContainer.response_formatter] - ), -): - """Delete an IKB""" - - success = await ikb_service.delete_ikb(ikb_id) - - if not success: - 
return JSONResponse( - status_code=status.HTTP_404_NOT_FOUND, - content=response_formatter.buildErrorResponse( - f'IKB with ID {ikb_id} not found' - ), - ) - - return JSONResponse( - status_code=status.HTTP_200_OK, - content=response_formatter.buildSuccessResponse( - {'message': 'IKB deleted successfully'} - ), - ) diff --git a/wavefront/server/modules/image_search_module/image_search_module/image_search_container.py b/wavefront/server/modules/image_search_module/image_search_module/image_search_container.py deleted file mode 100644 index 1a9988e5..00000000 --- a/wavefront/server/modules/image_search_module/image_search_module/image_search_container.py +++ /dev/null @@ -1,96 +0,0 @@ -from dependency_injector import containers, providers -from image_search_module.services.image_matching_service import ImageMatchingService -from image_search_module.services.reference_image_service import ReferenceImageService -from image_search_module.services.algorithm_factory import AlgorithmFactory -from image_search_module.services.algorithm_service import AlgorithmService -from image_search_module.services.ikb_service import IKBService -from image_search_module.algorithms.base import AlgorithmType -from image_search_module.repositories.sift_features_repository import ( - SIFTFeaturesRepository, -) -from db_repo_module.models.image_search_models import ( - ReferenceImageFeatures, - SIFTFeatures, -) -from db_repo_module.models.ikb_models import ImageKnowledgeBase -from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository -from image_search_module.repositories.ikb_repository import IKBRepository -import os -import yaml - - -class ImageSearchContainer(containers.DeclarativeContainer): - """Dependency injection container for image search module""" - - _container_dir = os.path.dirname(os.path.abspath(__file__)) - _config_path = os.path.join(_container_dir, 'config', 'algorithm_configs.yaml') - - with open(_config_path, 'r') as file: - config = yaml.safe_load(file) - - cloud_configs = providers.Configuration(ini_files=['config.ini']) - - cloud_storage_manager = providers.Dependency() - - db_client = providers.Dependency() - - active_algorithm_type = providers.Factory( - AlgorithmType, config['service']['active_algorithm'] - ) - - reference_features_repository = providers.Singleton( - SQLAlchemyRepository[ReferenceImageFeatures], - model=ReferenceImageFeatures, - db_client=db_client, - ) - - ikb_repository_db = providers.Singleton( - SQLAlchemyRepository[ImageKnowledgeBase], - model=ImageKnowledgeBase, - db_client=db_client, - ) - - ikb_repository = providers.Singleton( - IKBRepository, - db_repository=ikb_repository_db, - ) - - sift_features_repository = providers.Singleton( - SIFTFeaturesRepository, - model=SIFTFeatures, - db_client=db_client, - ) - - # Core services - algorithm_factory = providers.Singleton(AlgorithmFactory) - - algorithm_service = providers.Singleton( - AlgorithmService, algorithm_factory=algorithm_factory - ) - - reference_image_service = providers.Singleton( - ReferenceImageService, - features_repository=reference_features_repository, - sift_features_repository=sift_features_repository, - algorithm_service=algorithm_service, - cloud_storage_manager=cloud_storage_manager, - bucket_name=cloud_configs.image_search.reference_images_bucket, - ) - - # Main image matching service - image_matching_service = providers.Singleton( - ImageMatchingService, - algorithm_factory=algorithm_factory, - reference_service=reference_image_service, - 
active_algorithm_type=active_algorithm_type, - algorithm_config=config['algorithms'], - max_results=config['service']['max_results'], - ) - - # IKB service - ikb_service = providers.Singleton( - IKBService, - image_matching_service=image_matching_service, - reference_image_service=reference_image_service, - ikb_repository=ikb_repository, - ) diff --git a/wavefront/server/modules/image_search_module/image_search_module/models/ikb_models.py b/wavefront/server/modules/image_search_module/image_search_module/models/ikb_models.py deleted file mode 100644 index 45a540e2..00000000 --- a/wavefront/server/modules/image_search_module/image_search_module/models/ikb_models.py +++ /dev/null @@ -1,126 +0,0 @@ -from typing import List, Optional, Dict, Any -from pydantic import BaseModel, Field, field_validator -from datetime import datetime -from enum import Enum - -from image_search_module.algorithms.base import AlgorithmType - - -class IKBStatus(str, Enum): - """Status of an Image Knowledge Base""" - - ACTIVE = 'active' - INACTIVE = 'inactive' - - -class IKBType(str, Enum): - """Types of Image Knowledge Bases""" - - GOLD_MATCHING = 'gold_matching' - PHOTO_MATCHING = 'photo_matching' - - -class CreateIKBRequest(BaseModel): - """Request to create a new Image Knowledge Base""" - - name: str = Field(..., description='Name of the IKB', min_length=1, max_length=100) - description: Optional[str] = Field( - None, description='Description of the IKB', max_length=500 - ) - ikb_type: IKBType = Field(..., description='Type of the IKB') - algorithm_type: AlgorithmType = Field( - ..., description='Algorithm to use for this IKB' - ) - config: Dict[str, Any] = Field( - default_factory=dict, description='Algorithm-specific configuration(required)' - ) - - @field_validator('name') - @classmethod - def validate_name(cls, v): - if not v.strip(): - raise ValueError('Name cannot be empty') - return v.strip() - - -class IKBInfo(BaseModel): - """Information about an Image Knowledge Base""" - - ikb_id: str = Field(..., description='Unique identifier for the IKB') - name: str = Field(..., description='Name of the IKB') - description: Optional[str] = Field(None, description='Description of the IKB') - ikb_type: IKBType = Field(..., description='Type of the IKB') - algorithm_type: AlgorithmType = Field(..., description='Algorithm used by this IKB') - status: IKBStatus = Field(..., description='Current status of the IKB') - image_count: int = Field(0, description='Number of images in this IKB') - created_at: datetime = Field(..., description='When the IKB was created') - updated_at: datetime = Field(..., description='When the IKB was last updated') - config: Dict[str, Any] = Field( - default_factory=dict, description='Algorithm-specific configuration' - ) - - -class IKBImageAddRequest(BaseModel): - """Request to add an image to a specific IKB""" - - image_data: str = Field(..., description='Base64 encoded image data URL') - reference_id: Optional[str] = Field( - None, description='Custom reference ID (auto-generated if not provided)' - ) - metadata: Optional[Dict[str, Any]] = Field( - default_factory=dict, description='Additional metadata for the image' - ) - - @field_validator('image_data') - @classmethod - def validate_image_data(cls, v): - """Validate that image_data is a proper base64 data URL""" - if not v.startswith('data:image/'): - raise ValueError( - 'Image data must be a base64 data URL (data:image/...;base64,...)' - ) - - if ';base64,' not in v: - raise ValueError('Image data must be base64 encoded') - - return v - - 
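The same data-URL validation and decoding steps recur across the deleted request models above and in IKBService below. A minimal self-contained sketch of that shared pattern, with the regex and error message taken from the code being removed:

import base64
import re

DATA_URL_PATTERN = r'^data:(image/\w+);base64,(.+)'

def decode_image_data_url(image_data: str) -> bytes:
    # Reject anything that is not a data:image/...;base64,... URL, then
    # return the raw image bytes for downstream feature extraction.
    match = re.match(DATA_URL_PATTERN, image_data)
    if not match:
        raise ValueError(
            'Image data must be a base64 data URL (data:image/...;base64,...)'
        )
    return base64.b64decode(match.group(2))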
-class IKBSearchRequest(BaseModel): - """Request to search within a specific IKB""" - - image_data: str = Field(..., description='Base64 encoded image data URL') - max_results: int = Field( - 10, description='Maximum number of results to return', ge=1, le=100 - ) - threshold: Optional[float] = Field(None, description='Minimum similarity threshold') - - @field_validator('image_data') - @classmethod - def validate_image_data(cls, v): - """Validate that image_data is a proper base64 data URL""" - if not v.startswith('data:image/'): - raise ValueError( - 'Image data must be a base64 data URL (data:image/...;base64,...)' - ) - - if ';base64,' not in v: - raise ValueError('Image data must be base64 encoded') - - return v - - -class IKBSearchResponse(BaseModel): - """Response from IKB search""" - - query_id: str = Field(..., description='Unique identifier for this search query') - ikb_id: str = Field(..., description='ID of the IKB that was searched') - ikb_name: str = Field(..., description='Name of the IKB that was searched') - algorithm_used: str = Field(..., description='Algorithm used for matching') - matches: List[Dict[str, Any]] = Field(..., description='List of matching results') - total_images_searched: int = Field( - ..., description='Total number of images in the IKB' - ) - processing_time_ms: float = Field( - ..., description='Total processing time in milliseconds' - ) diff --git a/wavefront/server/modules/image_search_module/image_search_module/models/search_request.py b/wavefront/server/modules/image_search_module/image_search_module/models/search_request.py deleted file mode 100644 index 72fb9a52..00000000 --- a/wavefront/server/modules/image_search_module/image_search_module/models/search_request.py +++ /dev/null @@ -1,83 +0,0 @@ -from typing import List, Optional, Dict, Any -from pydantic import BaseModel, Field, validator -import base64 -import re - - -class ImageSearchRequest(BaseModel): - """Request model for image search with base64 data URL""" - - image_data: str = Field( - ..., description='Base64 encoded image data URL (data:image/...;base64,...)' - ) - algorithm_type: Optional[str] = Field( - None, description='Algorithm type to use (sift, sam_dinov2, custom_model)' - ) - - @validator('image_data') - def validate_image_data(cls, v): - """Validate that image_data is a proper base64 data URL""" - if not v.startswith('data:image/'): - raise ValueError( - 'Image data must be a base64 data URL (data:image/...;base64,...)' - ) - - if ';base64,' not in v: - raise ValueError('Image data must be base64 encoded') - - # Extract and validate base64 data - try: - data_url_pattern = r'^data:(image/\w+);base64,(.+)' - match = re.match(data_url_pattern, v) - if not match: - raise ValueError('Invalid data URL format') - - # Decode to check size and validity - base64_data = match.group(2) - image_bytes = base64.b64decode(base64_data) - - # Check size limit (20MB original = ~26.6MB base64) - MAX_SIZE = 20 * 1024 * 1024 # 20MB - if len(image_bytes) > MAX_SIZE: - raise ValueError( - f'Image too large. Maximum size: {MAX_SIZE // (1024*1024)}MB' - ) - - return v - - except base64.binascii.Error: - raise ValueError('Invalid base64 encoding') - except Exception as e: - raise ValueError(f'Invalid image data: {str(e)}') - - @validator('algorithm_type') - def validate_algorithm_type(cls, v): - """Validate algorithm type if provided""" - if v is not None: - valid_types = ['sift'] - if v not in valid_types: - raise ValueError( - f'Invalid algorithm type. 
Must be one of: {valid_types}' - ) - return v - - -class MatchResult(BaseModel): - """Individual match result""" - - algorithm_type: str - reference_id: str - match_score: float - is_match: bool - confidence: float - processing_time_ms: float - metadata: Dict[str, Any] - - -class ImageSearchResponse(BaseModel): - """Response model for image search""" - - query_id: str - matches: List[MatchResult] - algorithm_used: str - processing_time_ms: float diff --git a/wavefront/server/modules/image_search_module/image_search_module/repositories/ikb_repository.py b/wavefront/server/modules/image_search_module/image_search_module/repositories/ikb_repository.py deleted file mode 100644 index e619a39d..00000000 --- a/wavefront/server/modules/image_search_module/image_search_module/repositories/ikb_repository.py +++ /dev/null @@ -1,76 +0,0 @@ -from typing import List, Optional -from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository -from db_repo_module.models.ikb_models import ImageKnowledgeBase -from image_search_module.models.ikb_models import IKBInfo, IKBType, IKBStatus -from image_search_module.algorithms.base import AlgorithmType - - -class IKBRepository: - """Repository for Image Knowledge Base operations""" - - def __init__(self, db_repository: SQLAlchemyRepository[ImageKnowledgeBase]): - self.db_repository = db_repository - - async def create_ikb(self, ikb_info: IKBInfo) -> ImageKnowledgeBase: - """Create a new IKB in the database""" - return await self.db_repository.create( - ikb_id=ikb_info.ikb_id, - name=ikb_info.name, - description=ikb_info.description, - ikb_type=ikb_info.ikb_type.value, - algorithm_type=ikb_info.algorithm_type.value, - status=ikb_info.status.value, - config=ikb_info.config, - image_count=ikb_info.image_count, - ) - - async def get_ikb(self, ikb_id: str) -> Optional[ImageKnowledgeBase]: - """Get IKB by ID""" - return await self.db_repository.find_one(ikb_id=ikb_id) - - async def list_ikbs( - self, ikb_type: Optional[IKBType] = None - ) -> List[ImageKnowledgeBase]: - """List all IKBs, optionally filtered by type""" - filters = {} - if ikb_type: - filters['ikb_type'] = ikb_type.value - - return await self.db_repository.find(**filters) - - async def update_ikb(self, ikb_id: str, **updates) -> Optional[ImageKnowledgeBase]: - """Update IKB""" - # Use find_one_and_update method - filters = {'ikb_id': ikb_id} - return await self.db_repository.find_one_and_update(filters, **updates) - - async def delete_ikb(self, ikb_id: str) -> bool: - """Delete IKB""" - # Use delete_all method with filter - await self.db_repository.delete_all(ikb_id=ikb_id) - return True - - async def increment_image_count(self, ikb_id: str) -> bool: - """Increment the image count for an IKB""" - # Get current IKB - ikb = await self.get_ikb(ikb_id) - if ikb: - # Update with incremented count - await self.update_ikb(ikb_id, image_count=ikb.image_count + 1) - return True - return False - - def _convert_to_ikb_info(self, ikb_db: ImageKnowledgeBase) -> IKBInfo: - """Convert database model to IKBInfo""" - return IKBInfo( - ikb_id=ikb_db.ikb_id, - name=ikb_db.name, - description=ikb_db.description, - ikb_type=IKBType(ikb_db.ikb_type), - algorithm_type=AlgorithmType(ikb_db.algorithm_type), - status=IKBStatus(ikb_db.status), - image_count=ikb_db.image_count, - created_at=ikb_db.created_at, - updated_at=ikb_db.updated_at, - config=ikb_db.config, - ) diff --git a/wavefront/server/modules/image_search_module/image_search_module/repositories/sift_features_repository.py 
b/wavefront/server/modules/image_search_module/image_search_module/repositories/sift_features_repository.py deleted file mode 100644 index 487c1e2f..00000000 --- a/wavefront/server/modules/image_search_module/image_search_module/repositories/sift_features_repository.py +++ /dev/null @@ -1,52 +0,0 @@ -from typing import List -from db_repo_module.models.image_search_models import ReferenceImageFeatures -from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository -from db_repo_module.models.image_search_models import SIFTFeatures -from sqlalchemy import select - - -class SIFTFeaturesRepository(SQLAlchemyRepository[SIFTFeatures]): - """Repository for SIFT features""" - - async def create_sift_features( - self, - reference_image_id: str, - keypoints: List[dict], - descriptors: List[List[float]], - ) -> List[SIFTFeatures]: - """Create SIFT features for a reference image""" - sift_features = [] - - for i, (keypoint, descriptor) in enumerate(zip(keypoints, descriptors)): - # Create the feature using the parent's create method with keyword arguments - feature = await self.create( - reference_image_id=reference_image_id, - keypoint_id=i, # Ensure sequential ordering - x=keypoint['pt'][0], - y=keypoint['pt'][1], - size=keypoint['size'], - angle=keypoint['angle'], - response=keypoint['response'], - octave=keypoint['octave'], - class_id=keypoint['class_id'], - descriptor=descriptor, - ) - sift_features.append(feature) - - return sift_features - - async def get_features_by_ikb(self, ikb_id: str) -> List[SIFTFeatures]: - """Get SIFT features only from specific IKB""" - async with self.session() as session: - stmt = ( - select(SIFTFeatures) - .join( - ReferenceImageFeatures, - SIFTFeatures.reference_image_id - == ReferenceImageFeatures.reference_image_id, - ) - .where(ReferenceImageFeatures.ikb_id == ikb_id) - ) - - result = await session.execute(stmt) - return result.scalars().all() diff --git a/wavefront/server/modules/image_search_module/image_search_module/services/algorithm_factory.py b/wavefront/server/modules/image_search_module/image_search_module/services/algorithm_factory.py deleted file mode 100644 index b61bc47c..00000000 --- a/wavefront/server/modules/image_search_module/image_search_module/services/algorithm_factory.py +++ /dev/null @@ -1,36 +0,0 @@ -from typing import Dict, Any -from image_search_module.algorithms.base import ImageMatchingAlgorithm, AlgorithmType -from image_search_module.algorithms.sift_matcher import SIFTMatcher - - -class AlgorithmFactory: - """Factory for creating algorithm instances""" - - def __init__(self): - self._algorithms = { - AlgorithmType.SIFT: SIFTMatcher, - # Add other algorithms here as you implement them - } - - def create_algorithm( - self, algorithm_type: AlgorithmType, config: Dict[str, Any] - ) -> ImageMatchingAlgorithm: - """ - Create an algorithm instance - - Args: - algorithm_type: Type of algorithm to create - config: Configuration for the algorithm - - Returns: - Algorithm instance - """ - if algorithm_type not in self._algorithms: - raise ValueError(f'Unsupported algorithm type: {algorithm_type}') - - algorithm_class = self._algorithms[algorithm_type] - return algorithm_class(config) - - def get_supported_algorithms(self) -> list: - """Get list of supported algorithm types""" - return list(self._algorithms.keys()) diff --git a/wavefront/server/modules/image_search_module/image_search_module/services/algorithm_service.py b/wavefront/server/modules/image_search_module/image_search_module/services/algorithm_service.py deleted 
file mode 100644 index e703f55b..00000000 --- a/wavefront/server/modules/image_search_module/image_search_module/services/algorithm_service.py +++ /dev/null @@ -1,37 +0,0 @@ -from typing import Dict, Any -from image_search_module.services.algorithm_factory import AlgorithmFactory -from image_search_module.algorithms.base import AlgorithmType - - -class AlgorithmService: - """Service for algorithm-specific operations""" - - def __init__(self, algorithm_factory: AlgorithmFactory): - self.algorithm_factory = algorithm_factory - - def extract_features( - self, image_bytes: bytes, algorithm_type: str - ) -> Dict[str, Any]: - """Extract features using specified algorithm""" - # Convert string to enum - algo_enum = AlgorithmType(algorithm_type.lower()) - - # Create algorithm instance - algorithm = self.algorithm_factory.create_algorithm(algo_enum, {}) - - # Extract features - features = algorithm.extract_features(image_bytes) - - # Convert to serializable format - if hasattr(features, 'to_dict'): - features_dict = features.to_dict() - else: - features_dict = {'features': features} - - return { - 'features': features_dict, - 'algorithm_type': algorithm_type, - 'feature_count': len(features.keypoints) - if hasattr(features, 'keypoints') - else 0, - } diff --git a/wavefront/server/modules/image_search_module/image_search_module/services/ikb_service.py b/wavefront/server/modules/image_search_module/image_search_module/services/ikb_service.py deleted file mode 100644 index d2af6160..00000000 --- a/wavefront/server/modules/image_search_module/image_search_module/services/ikb_service.py +++ /dev/null @@ -1,191 +0,0 @@ -from typing import List, Dict, Any, Optional -import uuid -import base64 -import re -from datetime import datetime -from common_module.log.logger import logger - -from image_search_module.models.ikb_models import ( - CreateIKBRequest, - IKBInfo, - IKBStatus, - IKBType, - IKBImageAddRequest, - IKBSearchRequest, - IKBSearchResponse, -) -from image_search_module.services.image_matching_service import ImageMatchingService -from image_search_module.services.reference_image_service import ReferenceImageService -from image_search_module.repositories.ikb_repository import IKBRepository - - -class IKBService: - """Production-ready service for managing Image Knowledge Bases""" - - def __init__( - self, - image_matching_service: ImageMatchingService, - reference_image_service: ReferenceImageService, - ikb_repository: IKBRepository, - ): - self.image_matching_service = image_matching_service - self.reference_image_service = reference_image_service - self.ikb_repository = ikb_repository - - async def create_ikb(self, payload: CreateIKBRequest) -> IKBInfo: - """Create a new Image Knowledge Base""" - ikb_id = str(uuid.uuid4()) - - ikb_info = IKBInfo( - ikb_id=ikb_id, - name=payload.name, - description=payload.description, - ikb_type=payload.ikb_type, - algorithm_type=payload.algorithm_type, - status=IKBStatus.ACTIVE, - image_count=0, - created_at=datetime.utcnow(), - updated_at=datetime.utcnow(), - config=payload.config or {}, - ) - - await self.ikb_repository.create_ikb(ikb_info) - - logger.info(f'Created new IKB: {ikb_info.name} (ID: {ikb_id})') - return ikb_info - - async def get_ikb(self, ikb_id: str) -> Optional[IKBInfo]: - """Get information about a specific IKB""" - ikb_db = await self.ikb_repository.get_ikb(ikb_id) - if not ikb_db: - return None - - return self.ikb_repository._convert_to_ikb_info(ikb_db) - - async def list_ikbs(self, ikb_type: Optional[IKBType] = None) -> List[IKBInfo]: - 
"""List all IKBs, optionally filtered by type""" - ikb_dbs = await self.ikb_repository.list_ikbs(ikb_type=ikb_type) - - return [self.ikb_repository._convert_to_ikb_info(ikb_db) for ikb_db in ikb_dbs] - - async def update_ikb(self, ikb_id: str, **updates) -> Optional[IKBInfo]: - """Update an IKB""" - ikb_db = await self.ikb_repository.update_ikb(ikb_id, **updates) - if not ikb_db: - return None - - logger.info(f'Updated IKB: {ikb_db.name} (ID: {ikb_id})') - return self.ikb_repository._convert_to_ikb_info(ikb_db) - - async def delete_ikb(self, ikb_id: str) -> bool: - """Delete an IKB""" - success = await self.ikb_repository.delete_ikb(ikb_id) - if success: - logger.info(f'Deleted IKB (ID: {ikb_id})') - return success - - async def add_image_to_ikb( - self, ikb_id: str, payload: IKBImageAddRequest - ) -> Dict[str, Any]: - """add an image to a specific IKB""" - ikb = await self.get_ikb(ikb_id) - if not ikb: - raise ValueError(f'IKB with ID {ikb_id} not found') - - if ikb.status != IKBStatus.ACTIVE: - raise ValueError(f'IKB {ikb.name} is not active (status: {ikb.status})') - - # Decode base64 image - data_url_pattern = r'^data:(image/\w+);base64,(.+)' - match = re.match(data_url_pattern, payload.image_data) - if not match: - raise ValueError('Invalid image data format') - - image_bytes = base64.b64decode(match.group(2)) - - # Generate reference ID if not provided - reference_id = payload.reference_id or str(uuid.uuid4()) - - # Add reference image with IKB ID - result = await self.reference_image_service.add_reference_image( - image_bytes=image_bytes, - reference_image_id=reference_id, - algorithm_type=ikb.algorithm_type.value, - ikb_id=ikb_id, - metadata={ - **payload.metadata, - 'ikb_id': ikb_id, - 'ikb_name': ikb.name, - 'ikb_type': ikb.ikb_type.value, - }, - ) - - # Update IKB image count in database - await self.ikb_repository.increment_image_count(ikb_id) - - logger.info(f'added image to IKB {ikb.name}: {reference_id}') - - return { - 'reference_id': reference_id, - 'ikb_id': ikb_id, - 'ikb_name': ikb.name, - 'algorithm_type': ikb.algorithm_type.value, - 'extraction_results': result, - } - - async def search_in_ikb( - self, ikb_id: str, payload: IKBSearchRequest - ) -> IKBSearchResponse: - """Search for similar images within a specific IKB""" - ikb = await self.get_ikb(ikb_id) - if not ikb: - raise ValueError(f'IKB with ID {ikb_id} not found') - - if ikb.status != IKBStatus.ACTIVE: - raise ValueError(f'IKB {ikb.name} is not active (status: {ikb.status})') - - # Decode base64 image - data_url_pattern = r'^data:(image/\w+);base64,(.+)' - match = re.match(data_url_pattern, payload.image_data) - if not match: - raise ValueError('Invalid image data format') - - image_bytes = base64.b64decode(match.group(2)) - - # Generate query ID - query_id = str(uuid.uuid4()) - - # Perform matching using the IKB's algorithm - matching_result = await self.image_matching_service.match_image( - image_bytes=image_bytes, - ikb_id=ikb_id, - max_results=payload.max_results, - algorithm_type=ikb.algorithm_type, - ) - - # Filter results to only include images from this IKB - ikb_matches = [] - for match in matching_result: - # Get the reference image from database to check IKB ID - reference_image = ( - await self.reference_image_service.features_repository.find_one( - reference_image_id=match.reference_id - ) - ) - - if reference_image and reference_image.ikb_id == ikb_id: - ikb_matches.append(match.to_dict()) - - response = IKBSearchResponse( - query_id=query_id, - ikb_id=ikb_id, - ikb_name=ikb.name, - 
algorithm_used=ikb.algorithm_type.value, - matches=ikb_matches, - total_images_searched=ikb.image_count, - processing_time_ms=sum(m.processing_time_ms for m in matching_result), - ) - - logger.info(f'Searched IKB {ikb.name}: found {len(ikb_matches)} matches') - - return response diff --git a/wavefront/server/modules/image_search_module/image_search_module/services/image_matching_service.py b/wavefront/server/modules/image_search_module/image_search_module/services/image_matching_service.py deleted file mode 100644 index 1d669908..00000000 --- a/wavefront/server/modules/image_search_module/image_search_module/services/image_matching_service.py +++ /dev/null @@ -1,104 +0,0 @@ -from typing import List, Dict, Any, Optional -from common_module.log.logger import logger - -from image_search_module.algorithms.base import ( - ImageMatchingAlgorithm, - MatchResult, - AlgorithmType, -) -from image_search_module.services.algorithm_factory import AlgorithmFactory -from image_search_module.services.reference_image_service import ReferenceImageService - - -class ImageMatchingService: - """Main service for image matching operations""" - - def __init__( - self, - algorithm_factory: AlgorithmFactory, - reference_service: ReferenceImageService, - active_algorithm_type: AlgorithmType, - algorithm_config: Dict[str, Any], - max_results: int = 10, - ): - self.algorithm_factory = algorithm_factory - self.reference_service = reference_service - self.active_algorithm_type = active_algorithm_type - self.algorithm_config = algorithm_config - self.max_results = max_results - - # Initialize active algorithm - self.active_algorithm = self._create_active_algorithm() - - def _create_active_algorithm(self) -> ImageMatchingAlgorithm: - """Create the currently active algorithm instance""" - algo_config = self.algorithm_config.get(self.active_algorithm_type.value, {}) - return self.algorithm_factory.create_algorithm( - self.active_algorithm_type, algo_config - ) - - async def match_image( - self, - image_bytes: bytes, - ikb_id: str, - threshold: Optional[float] = None, - max_results: Optional[int] = None, - algorithm_type: Optional[AlgorithmType] = None, - ) -> List[MatchResult]: - """ - Main image matching method - """ - - # Use provided values or defaults - max_results = max_results or self.max_results - algorithm = ( - self.active_algorithm - if algorithm_type is None - else self.algorithm_factory.create_algorithm( - algorithm_type, self.algorithm_config.get(algorithm_type.value, {}) - ) - ) - - logger.info(f'Starting image matching with {algorithm.__class__.__name__}') - - # Extract features from query image - query_features = algorithm.extract_features(image_bytes) - logger.info('Query features extracted successfully') - - # Get reference features for this algorithm - algorithm_type_str = ( - algorithm_type.value if algorithm_type else self.active_algorithm_type.value - ) - reference_features = await self.reference_service.get_reference_features( - algorithm_type=algorithm_type_str, ikb_id=ikb_id - ) - logger.info(f'Retrieved {len(reference_features)} reference features') - - # Perform batch matching - all_matches = algorithm.batch_match(query_features, reference_features) - logger.info(f'Completed matching, found {len(all_matches)} comparisons') - - # Keep flagged matches, apply the optional score threshold, then sort - valid_matches = [ - match - for match in all_matches - if match.is_match and (threshold is None or match.match_score >= threshold) - ] - - sorted_matches = sorted( - valid_matches, key=lambda x: x.match_score, reverse=True - )[:max_results] - - logger.info( - f'Returning
{len(sorted_matches)} matches above threshold {threshold}' - ) - - return sorted_matches - - def get_algorithm_info( - self, algorithm_type: Optional[AlgorithmType] = None - ) -> Dict[str, Any]: - """Get information about an algorithm""" - - algo_type = algorithm_type or self.active_algorithm_type - algorithm = self.algorithm_factory.create_algorithm( - algo_type, self.algorithm_config.get(algo_type.value, {}) - ) - - return algorithm.get_algorithm_info().to_dict() diff --git a/wavefront/server/modules/image_search_module/image_search_module/services/reference_image_service.py b/wavefront/server/modules/image_search_module/image_search_module/services/reference_image_service.py deleted file mode 100644 index 6622c504..00000000 --- a/wavefront/server/modules/image_search_module/image_search_module/services/reference_image_service.py +++ /dev/null @@ -1,244 +0,0 @@ -from typing import List, Dict, Any, Optional -import cv2 -import numpy as np -from common_module.log.logger import logger - -from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository -from db_repo_module.models.image_search_models import ReferenceImageFeatures -from image_search_module.repositories.sift_features_repository import ( - SIFTFeaturesRepository, -) -from flo_cloud.cloud_storage import CloudStorageManager -from image_search_module.services.algorithm_service import AlgorithmService -from image_search_module.algorithms.sift_matcher import SIFTFeatures - - -class ReferenceImageService: - def __init__( - self, - features_repository: SQLAlchemyRepository[ReferenceImageFeatures], - sift_features_repository: SIFTFeaturesRepository, - algorithm_service: AlgorithmService, - cloud_storage_manager: CloudStorageManager, - bucket_name: str, - ): - self.cloud_storage_manager = cloud_storage_manager - self.features_repository = features_repository - self.sift_features_repository = sift_features_repository - self.algorithm_service = algorithm_service - self.bucket_name = bucket_name - - logger.info('ReferenceImageService initialized') - - async def add_reference_image( - self, - image_bytes: bytes, - reference_image_id: str, - algorithm_type: str = 'sift', - ikb_id: Optional[str] = None, - metadata: Optional[dict] = None, - ) -> Dict[str, Any]: - """ - Add a new reference image and extract features for specified algorithms - """ - - self.cloud_storage_manager.save_small_file( - file_content=image_bytes, - bucket_name=self.bucket_name, - key=reference_image_id, - ) - - logger.info(f'Uploaded reference image {reference_image_id} to cloud storage') - - # Extract features for the algorithm - extraction_results = {} - - features_data = self.algorithm_service.extract_features( - image_bytes, algorithm_type - ) - - # Store features in database - await self.features_repository.create( - reference_image_id=reference_image_id, - ikb_id=ikb_id, - algorithm_type=algorithm_type, - image_url=reference_image_id, - image_metadata=metadata or {}, - ) - - if algorithm_type.lower() == 'sift': - await self._store_sift_features(reference_image_id, features_data) - - extraction_results[algorithm_type] = { - 'status': 'success', - 'features_count': features_data.get('feature_count', 0), - 'extraction_time_ms': features_data.get('extraction_time_ms', 0), - } - - logger.info( - f'Extracted features for {reference_image_id} using {algorithm_type}' - ) - - return { - 'reference_image_id': reference_image_id, - 'algorithm_type': algorithm_type, - 'features_count': features_data.get('feature_count', 0), -
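The match_image pipeline above (extract query features, load reference features for the IKB, batch match, filter, rank) is normally reached through IKBService, but it can also be driven directly. A sketch, assuming the dependencies are already constructed and that this runs inside an async function:

    # reference_image_service and the IKB id are assumed to be available already
    service = ImageMatchingService(
        algorithm_factory=AlgorithmFactory(),
        reference_service=reference_image_service,
        active_algorithm_type=AlgorithmType.SIFT,
        algorithm_config={'sift': {'max_features': 1000}},
        max_results=10,
    )

    matches = await service.match_image(image_bytes, ikb_id=some_ikb_id, threshold=0.7)
    for m in matches:  # MatchResult objects, best score first
        print(m.reference_id, m.match_score)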
'stored_in': [ - 'ReferenceImageFeatures', - f'{algorithm_type.title()}Features', - ], - } - - async def _store_sift_features(self, reference_image_id: str, features_data: dict): - """Store SIFT features in the dedicated SIFTFeatures table""" - keypoints = features_data.get('features', {}).get('keypoints', []) - descriptors = features_data.get('features', {}).get('descriptors', []) - - await self.sift_features_repository.create_sift_features( - reference_image_id=reference_image_id, - keypoints=keypoints, - descriptors=descriptors, - ) - - logger.info(f'Stored {len(keypoints)} SIFT keypoints for {reference_image_id}') - - async def get_reference_features( - self, algorithm_type: str, ikb_id: str - ) -> Dict[str, SIFTFeatures]: - """ - Get all reference features for a specific algorithm type - - Args: - algorithm_type: Type of algorithm - - Returns: - Dictionary mapping reference_image_id to SIFTFeatures objects - """ - - if algorithm_type.lower() == 'sift': - # Get SIFT features from dedicated table - sift_features = await self.sift_features_repository.get_features_by_ikb( - ikb_id - ) - - # Group features by reference_image_id and sort by keypoint_id - grouped_features = {} - for feature in sift_features: - ref_id = feature.reference_image_id - - if ref_id not in grouped_features: - grouped_features[ref_id] = { - 'keypoints': [], - 'descriptors': [], - 'keypoint_data': [], # Store (keypoint_id, feature) pairs for sorting - } - - # Store keypoint data with ID for proper ordering - grouped_features[ref_id]['keypoint_data'].append( - ( - feature.keypoint_id, - { - 'pt': [feature.x, feature.y], - 'size': feature.size, - 'angle': feature.angle, - 'response': feature.response, - 'octave': feature.octave, - 'class_id': feature.class_id, - }, - feature.descriptor, - ) - ) - - # Convert to SIFTFeatures objects - sift_features_dict = {} - for ref_id, data in grouped_features.items(): - # Sort by keypoint_id to maintain order - sorted_data = sorted(data['keypoint_data'], key=lambda x: x[0]) - - # Extract keypoints and descriptors in correct order - keypoints = [] - descriptors = [] - - for keypoint_id, keypoint_data, descriptor in sorted_data: - # Create OpenCV KeyPoint object - kp = cv2.KeyPoint( - x=keypoint_data['pt'][0], - y=keypoint_data['pt'][1], - size=keypoint_data['size'], - angle=keypoint_data['angle'], - response=keypoint_data['response'], - octave=keypoint_data['octave'], - class_id=keypoint_data['class_id'], - ) - keypoints.append(kp) - descriptors.append(descriptor) - - # Convert descriptors to numpy array - - descriptors_array = np.array(descriptors, dtype=np.float32) - - # Create SIFTFeatures object with correct types - sift_features_dict[ref_id] = SIFTFeatures( - keypoints=keypoints, - descriptors=descriptors_array, - image_shape=(800, 600), # Default shape - ) - - logger.info( - f'Retrieved {len(sift_features_dict)} reference features for {algorithm_type}' - ) - return sift_features_dict - - else: - # Handle other algorithm types - logger.warning(f'Algorithm type {algorithm_type} not implemented yet') - return {} - - async def delete_reference_image( - self, reference_image_id: str, algorithm_types: Optional[List[str]] = None - ) -> Dict[str, Any]: - """ - Delete a reference image and its features - - Args: - reference_image_id: ID of the reference image - algorithm_types: Optional list of algorithm types to delete features for - - Returns: - Dictionary with deletion results - """ - # Delete from cloud storage - await self.cloud_storage_manager.delete_file( - 
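get_reference_features above rebuilds cv2.KeyPoint objects from stored rows; the forward direction, i.e. what the extraction side must produce for SIFTFeaturesRepository.create_sift_features, looks like this. A sketch using OpenCV's SIFT directly (the image path is a placeholder):

    import cv2
    import numpy as np

    sift = cv2.SIFT_create()
    image = cv2.imread('reference.png', cv2.IMREAD_GRAYSCALE)
    kps, desc = sift.detectAndCompute(image, None)

    # Serialize to the dict shape the repository expects; keypoint_id ordering
    # is assigned by the repository itself via enumerate()
    keypoints = [
        {
            'pt': [kp.pt[0], kp.pt[1]],
            'size': kp.size,
            'angle': kp.angle,
            'response': kp.response,
            'octave': kp.octave,
            'class_id': kp.class_id,
        }
        for kp in kps
    ]
    descriptors = desc.astype(np.float32).tolist()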
bucket_name=self.bucket_name, file_path=reference_image_id - ) - - # Delete features from database - deleted_features = await self.features_repository.delete_by_reference_id( - reference_image_id, - algorithm_types[0] - if algorithm_types and len(algorithm_types) == 1 - else None, - ) - - result = { - 'reference_image_id': reference_image_id, - 'deleted_features_count': deleted_features, - 'deleted_from_storage': True, - } - - logger.info(f'Successfully deleted reference image {reference_image_id}') - return result - - async def ensure_features_available(self, algorithm_type: str, ikb_id: str) -> bool: - features = await self.get_reference_features(algorithm_type, ikb_id) - is_available = len(features) > 0 - - if not is_available: - logger.warning(f'No reference features available for {algorithm_type}') - else: - logger.info( - f'Features available for {algorithm_type}: {len(features)} references' - ) - - return is_available diff --git a/wavefront/server/modules/image_search_module/pyproject.toml b/wavefront/server/modules/image_search_module/pyproject.toml deleted file mode 100644 index 8dc3103d..00000000 --- a/wavefront/server/modules/image_search_module/pyproject.toml +++ /dev/null @@ -1,46 +0,0 @@ -[project] -name = "image-search-module" -version = "0.1.0" -description = "Generic image search and matching module" -authors = [ - { name = "rootflo engineering", email = "engineering@rootflo.ai" } -] -requires-python = ">=3.11" - -dependencies = [ - "common-module", - "db-repo-module", - "flo-cloud", - "opencv-python>=4.8.0", - "numpy>=1.24.0", - "pillow>=10.0.0", - "scikit-learn>=1.3.0", - "pytest>=8.4.1", - "pytest-asyncio>=0.26.0", -] - -[tool.uv.sources] -common-module = { workspace = true } -db-repo-module = { workspace = true } -flo-cloud = { workspace = true } - -[dependency-groups] -dev = [ - "pytest>=8.3.3,<9.0.0", - "pytest-asyncio>=0.24.0,<1.0.0", - "pytest-mock>=3.12.0", -] - - -[tool.pytest.ini_options] -asyncio_mode = "auto" - -[tool.uv] -package = true - -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[tool.hatch.build.targets.wheel] -packages = ["image_search_module"] diff --git a/wavefront/server/modules/image_search_module/tests/conftest.py b/wavefront/server/modules/image_search_module/tests/conftest.py deleted file mode 100644 index 428fba00..00000000 --- a/wavefront/server/modules/image_search_module/tests/conftest.py +++ /dev/null @@ -1,35 +0,0 @@ -import pytest -import os -from pathlib import Path - - -from db_repo_module.database.connection import DatabaseClient, DatabaseConfig - - -@pytest.fixture(scope='session') -async def db_client(): - """Create database client for testing""" - db_config = DatabaseConfig( - username=os.getenv('DB_USERNAME', 'test_user'), - password=os.getenv('DB_PASSWORD', 'test_password'), - host=os.getenv('DB_HOST', 'localhost'), - port=os.getenv('DB_PORT', '5432'), - db_name=os.getenv('DB_NAME', 'test_db'), - ) - - db_client = DatabaseClient(db_config) - await db_client.connect() - yield db_client - await db_client.close() - - -@pytest.fixture -def test_image_base64(): - """Provide a test image as base64 data URL""" - return 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg==' - - -@pytest.fixture -def test_images_dir(): - """Provide path to test images directory""" - return Path(__file__).parent / 'test_images' diff --git a/wavefront/server/modules/image_search_module/tests/db_setup.py
b/wavefront/server/modules/image_search_module/tests/db_setup.py deleted file mode 100644 index 79431d41..00000000 --- a/wavefront/server/modules/image_search_module/tests/db_setup.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -Database setup utilities for IKB functionality. -Can be imported and used in tests or other scripts. -""" - -from typing import Optional -from db_repo_module.database.connection import DatabaseClient, DatabaseConfig -from db_repo_module.database.base import Base -from sqlalchemy import text - - -async def ensure_tables_exist(db_client: DatabaseClient) -> None: - """ - Ensure all IKB-related tables exist in the database with the correct schema. - This will drop and recreate tables to ensure they have the latest schema. - - Args: - db_client: DatabaseClient instance - """ - async with db_client._engine.begin() as connection: - # Drop existing tables in reverse order (due to foreign key constraints) - tables_to_drop = [ - 'sift_features', - 'reference_image_features', - 'image_knowledge_bases', - ] - - for table in tables_to_drop: - await connection.execute(text(f'DROP TABLE IF EXISTS {table} CASCADE;')) - - # Create all tables with the latest schema - await connection.run_sync(Base.metadata.create_all) - - -async def setup_test_database( - db_config: Optional[DatabaseConfig] = None, -) -> DatabaseClient: - """ - Setup a test database with all required tables. - - Args: - db_config: Optional database config. If None, uses environment variables. - - Returns: - DatabaseClient instance ready for use - """ - if db_config is None: - import os - - db_config = DatabaseConfig( - username=os.getenv('DB_USERNAME'), - password=os.getenv('DB_PASSWORD'), - host=os.getenv('DB_HOST'), - port=os.getenv('DB_PORT'), - db_name=os.getenv('DB_NAME'), - ) - - db_client = DatabaseClient(db_config) - await db_client.connect() - await ensure_tables_exist(db_client) - - return db_client - - -async def cleanup_test_database(db_client: DatabaseClient) -> None: - """ - Clean up test database by dropping IKB tables. - - Args: - db_client: DatabaseClient instance - """ - async with db_client._engine.begin() as connection: - # Drop tables in reverse order (due to foreign key constraints) - tables_to_drop = [ - 'sift_features', - 'reference_image_features', - 'image_knowledge_bases', - ] - - for table in tables_to_drop: - await connection.execute(text(f'DROP TABLE IF EXISTS {table} CASCADE;')) - - await db_client.close() - - -async def verify_tables_exist(db_client: DatabaseClient) -> None: - """ - Verify that all required tables exist and have the correct columns. 
- - Args: - db_client: DatabaseClient instance - """ - async with db_client._engine.begin() as connection: - # Check if tables exist - result = await connection.execute( - text(""" - SELECT table_name - FROM information_schema.tables - WHERE table_schema = 'public' - AND table_name IN ('image_knowledge_bases', 'reference_image_features', 'sift_features') - ORDER BY table_name; - """) - ) - - tables = [row[0] for row in result.fetchall()] - print(f'Found tables: {tables}') - - # Check if reference_image_features has ikb_id column - if 'reference_image_features' in tables: - result = await connection.execute( - text(""" - SELECT column_name - FROM information_schema.columns - WHERE table_name = 'reference_image_features' - AND column_name = 'ikb_id'; - """) - ) - - ikb_id_column = result.fetchone() - if ikb_id_column: - print('✅ reference_image_features table has ikb_id column') - else: - print('❌ reference_image_features table missing ikb_id column') diff --git a/wavefront/server/modules/image_search_module/tests/test_crud_endpoints.py b/wavefront/server/modules/image_search_module/tests/test_crud_endpoints.py deleted file mode 100644 index e933cb74..00000000 --- a/wavefront/server/modules/image_search_module/tests/test_crud_endpoints.py +++ /dev/null @@ -1,627 +0,0 @@ -""" -Comprehensive CRUD endpoint tests for Image Search Module -Tests all endpoints: Create, Read, Update, Delete operations -""" - -import pytest -import base64 -from unittest.mock import Mock, AsyncMock, MagicMock -from fastapi import FastAPI -from fastapi.testclient import TestClient -from datetime import datetime -from uuid import uuid4 - -from image_search_module.controllers.image_search_controller import image_search_router -from image_search_module.image_search_container import ImageSearchContainer -from image_search_module.algorithms.base import AlgorithmType -from image_search_module.models.ikb_models import ( - IKBType, - IKBStatus, -) -from common_module.common_container import CommonContainer -from db_repo_module.db_repo_container import DatabaseModuleContainer - - -class MockDbClient: - def __init__(self): - # Create a mock session factory - self.session = MagicMock() - # Mock the async context manager behavior - mock_session = MagicMock() - mock_session.add = Mock() - mock_session.commit = AsyncMock() - mock_session.refresh = AsyncMock() - mock_session.query = Mock() - mock_session.get = AsyncMock() - - self.session.return_value.__aenter__ = AsyncMock(return_value=mock_session) - self.session.return_value.__aexit__ = AsyncMock(return_value=None) - - -# Create a custom mock IKBInfo that serializes properly -class MockIKBInfo: - """Mock IKBInfo that serializes enums properly""" - - def __init__(self, **kwargs): - self.ikb_id = kwargs.get('ikb_id', str(uuid4())) - self.name = kwargs.get('name', 'Test IKB') - self.description = kwargs.get('description', 'Test IKB for unit testing') - self.ikb_type = kwargs.get('ikb_type', IKBType.GOLD_MATCHING) - self.algorithm_type = kwargs.get('algorithm_type', AlgorithmType.SIFT) - self.status = kwargs.get('status', IKBStatus.ACTIVE) - self.image_count = kwargs.get('image_count', 0) - self.created_at = kwargs.get('created_at', datetime.now()) - self.updated_at = kwargs.get('updated_at', datetime.now()) - self.config = kwargs.get('config', {'threshold': 0.8}) - - def dict(self): - """Return dictionary with enum values serialized as strings""" - return { - 'ikb_id': self.ikb_id, - 'name': self.name, - 'description': self.description, - 'ikb_type': self.ikb_type.value - if
hasattr(self.ikb_type, 'value') - else str(self.ikb_type), - 'algorithm_type': self.algorithm_type.value - if hasattr(self.algorithm_type, 'value') - else str(self.algorithm_type), - 'status': self.status.value - if hasattr(self.status, 'value') - else str(self.status), - 'image_count': self.image_count, - 'created_at': self.created_at.isoformat() - if isinstance(self.created_at, datetime) - else self.created_at, - 'updated_at': self.updated_at.isoformat() - if isinstance(self.updated_at, datetime) - else self.updated_at, - 'config': self.config, - } - - def model_dump(self, mode='json'): - """Pydantic v2 compatibility - calls dict() method""" - return self.dict() - - -# Create a custom mock IKBSearchResponse that serializes properly -class MockIKBSearchResponse: - """Mock IKBSearchResponse that serializes properly""" - - def __init__(self, **kwargs): - self.query_id = kwargs.get('query_id', str(uuid4())) - self.ikb_id = kwargs.get('ikb_id', str(uuid4())) - self.ikb_name = kwargs.get('ikb_name', 'Test IKB') - self.algorithm_used = kwargs.get('algorithm_used', 'sift') - self.matches = kwargs.get('matches', []) - self.total_images_searched = kwargs.get('total_images_searched', 0) - self.processing_time_ms = kwargs.get('processing_time_ms', 0.0) - - def dict(self): - """Return dictionary representation""" - return { - 'query_id': self.query_id, - 'ikb_id': self.ikb_id, - 'ikb_name': self.ikb_name, - 'algorithm_used': self.algorithm_used, - 'matches': self.matches, - 'total_images_searched': self.total_images_searched, - 'processing_time_ms': self.processing_time_ms, - } - - -@pytest.fixture -def mock_containers(): - """Setup mock containers for testing""" - # Mock database container - db_repo_container = DatabaseModuleContainer() - mock_db_client = MockDbClient() - db_repo_container.db_client.override(mock_db_client) - - # Mock common container - common_container = CommonContainer() - mock_cache_manager = Mock() - mock_cache_manager.get_str.return_value = ( - '{"user_id": "test_user", "session_id": "test_session"}' - ) - mock_cache_manager.add = Mock() - common_container.cache_manager.override(mock_cache_manager) - - # Mock image search container - mock_cloud_storage_manager = Mock() - mock_cloud_storage_manager.save_file = AsyncMock( - return_value='mock://storage/test.jpg' - ) - mock_cloud_storage_manager.get_file = AsyncMock(return_value=b'mock_data') - - image_search_container = ImageSearchContainer( - db_client=mock_db_client, - cloud_storage_manager=mock_cloud_storage_manager, - ) - - # Override the problematic providers directly - image_search_container.active_algorithm_type.override(AlgorithmType.SIFT) - - # Mock the repositories with proper async methods and correct return types - mock_ikb_repository = Mock() - mock_reference_features_repository = Mock() - mock_sift_features_repository = Mock() - - # Override the repository providers - image_search_container.ikb_repository.override(mock_ikb_repository) - image_search_container.reference_features_repository.override( - mock_reference_features_repository - ) - image_search_container.sift_features_repository.override( - mock_sift_features_repository - ) - - # Mock the services that depend on config - mock_algorithm_factory = Mock() - mock_algorithm_service = Mock() - mock_reference_image_service = Mock() - mock_image_matching_service = Mock() - - image_search_container.algorithm_factory.override(mock_algorithm_factory) - image_search_container.algorithm_service.override(mock_algorithm_service) - 
image_search_container.reference_image_service.override( - mock_reference_image_service - ) - image_search_container.image_matching_service.override(mock_image_matching_service) - - # Wire containers - common_container.wire(packages=['image_search_module.controllers']) - image_search_container.wire(packages=['image_search_module.controllers']) - - yield db_repo_container, common_container, image_search_container - - # Cleanup - common_container.unwire() - image_search_container.unwire() - - -@pytest.fixture -def test_app(mock_containers): - """Create test FastAPI app""" - app = FastAPI() - app.include_router(image_search_router, prefix='/floware') - return app - - -@pytest.fixture -def test_client(test_app): - """Create test client""" - return TestClient(test_app) - - -@pytest.fixture -def sample_image_data(): - """Create a sample base64 image data URL for testing""" - # Create a minimal 1x1 pixel PNG in base64 - # This is a valid but minimal PNG file - png_data = base64.b64decode( - 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg==' - ) - return f'data:image/png;base64,{base64.b64encode(png_data).decode()}' - - -@pytest.fixture -def sample_ikb_data(): - """Sample IKB data for testing""" - return { - 'name': 'Test IKB', - 'description': 'Test IKB for unit testing', - 'ikb_type': 'gold_matching', - 'algorithm_type': 'sift', - 'config': {'threshold': 0.8}, - } - - -@pytest.fixture -def mock_ikb_info(): - """Mock IKB info object that serializes properly""" - return MockIKBInfo( - ikb_id=str(uuid4()), - name='Test IKB', - description='Test IKB for unit testing', - ikb_type=IKBType.GOLD_MATCHING, - algorithm_type=AlgorithmType.SIFT, - status=IKBStatus.ACTIVE, - image_count=0, - created_at=datetime.now(), - updated_at=datetime.now(), - config={'threshold': 0.8}, - ) - - -class TestIKBCreateEndpoint: - """Test CREATE operations""" - - def test_create_ikb_success( - self, test_client, sample_ikb_data, mock_containers, mock_ikb_info - ): - """Test successful IKB creation""" - db_repo_container, common_container, image_search_container = mock_containers - - # Mock the IKB service to return our mock IKB info - mock_ikb_service = Mock() - mock_ikb_service.create_ikb = AsyncMock(return_value=mock_ikb_info) - image_search_container.ikb_service.override(mock_ikb_service) - - response = test_client.post('/floware/ikb/create', json=sample_ikb_data) - - assert response.status_code == 201 - response_data = response.json() - assert response_data['meta']['status'] == 'success' - assert response_data['data']['name'] == sample_ikb_data['name'] - assert response_data['data']['ikb_type'] == sample_ikb_data['ikb_type'] - assert ( - response_data['data']['algorithm_type'] == sample_ikb_data['algorithm_type'] - ) - - def test_create_ikb_invalid_data(self, test_client): - """Test IKB creation with invalid data""" - invalid_data = { - 'name': '', # Empty name should fail validation - 'ikb_type': 'invalid_type', - 'algorithm_type': 'invalid_algorithm', - } - - response = test_client.post('/floware/ikb/create', json=invalid_data) - assert response.status_code == 422 # Validation error - - def test_create_ikb_missing_required_fields(self, test_client): - """Test IKB creation with missing required fields""" - incomplete_data = { - 'name': 'Test IKB' - # Missing ikb_type and algorithm_type - } - - response = test_client.post('/floware/ikb/create', json=incomplete_data) - assert response.status_code == 422 # Validation error - - -class TestIKBReadEndpoints: - """Test READ 
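The sample_image_data fixture above hard-codes a 1x1 PNG literal. An equivalent payload can be generated with Pillow (already a dependency of this module), which avoids maintaining the base64 string and allows arbitrary sizes; a minimal sketch, with the helper name chosen for illustration:

    import base64
    import io

    from PIL import Image

    def make_test_data_url(size=(1, 1), color=(255, 255, 255)) -> str:
        # Render an in-memory PNG and wrap it in the data-URL form the API expects
        buf = io.BytesIO()
        Image.new('RGB', size, color).save(buf, format='PNG')
        return 'data:image/png;base64,' + base64.b64encode(buf.getvalue()).decode('ascii')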
operations""" - - def test_list_ikbs_success(self, test_client, mock_containers, mock_ikb_info): - """Test successful IKB listing""" - db_repo_container, common_container, image_search_container = mock_containers - - # Mock the IKB service to return a list of IKBs - mock_ikb_service = Mock() - mock_ikb_service.list_ikbs = AsyncMock(return_value=[mock_ikb_info]) - image_search_container.ikb_service.override(mock_ikb_service) - - response = test_client.get('/floware/ikb/') - - assert response.status_code == 200 - response_data = response.json() - assert response_data['meta']['status'] == 'success' - assert 'ikbs' in response_data['data'] - assert len(response_data['data']['ikbs']) == 1 - assert response_data['data']['ikbs'][0]['name'] == mock_ikb_info.name - - def test_list_ikbs_with_type_filter( - self, test_client, mock_containers, mock_ikb_info - ): - """Test IKB listing with type filter""" - db_repo_container, common_container, image_search_container = mock_containers - - # Mock the IKB service - mock_ikb_service = Mock() - mock_ikb_service.list_ikbs = AsyncMock(return_value=[mock_ikb_info]) - image_search_container.ikb_service.override(mock_ikb_service) - - response = test_client.get('/floware/ikb/?ikb_type=gold_matching') - - assert response.status_code == 200 - # Verify that the service was called with the correct filter - mock_ikb_service.list_ikbs.assert_called_once_with( - ikb_type=IKBType.GOLD_MATCHING - ) - - def test_list_ikbs_empty(self, test_client, mock_containers): - """Test IKB listing when no IKBs exist""" - db_repo_container, common_container, image_search_container = mock_containers - - # Mock the IKB service to return empty list - mock_ikb_service = Mock() - mock_ikb_service.list_ikbs = AsyncMock(return_value=[]) - image_search_container.ikb_service.override(mock_ikb_service) - - response = test_client.get('/floware/ikb/') - - assert response.status_code == 200 - response_data = response.json() - assert response_data['meta']['status'] == 'success' - assert response_data['data']['ikbs'] == [] - - def test_get_ikb_success(self, test_client, mock_containers, mock_ikb_info): - """Test successful IKB retrieval by ID""" - db_repo_container, common_container, image_search_container = mock_containers - - # Mock the IKB service - mock_ikb_service = Mock() - mock_ikb_service.get_ikb = AsyncMock(return_value=mock_ikb_info) - image_search_container.ikb_service.override(mock_ikb_service) - - response = test_client.get(f'/floware/ikb/{mock_ikb_info.ikb_id}') - - assert response.status_code == 200 - response_data = response.json() - assert response_data['meta']['status'] == 'success' - assert response_data['data']['ikb_id'] == mock_ikb_info.ikb_id - assert response_data['data']['name'] == mock_ikb_info.name - - def test_get_ikb_not_found(self, test_client, mock_containers): - """Test IKB retrieval when IKB doesn't exist""" - db_repo_container, common_container, image_search_container = mock_containers - - # Mock the IKB service to return None (not found) - mock_ikb_service = Mock() - mock_ikb_service.get_ikb = AsyncMock(return_value=None) - image_search_container.ikb_service.override(mock_ikb_service) - - fake_id = str(uuid4()) - response = test_client.get(f'/floware/ikb/{fake_id}') - - assert response.status_code == 404 - response_data = response.json() - assert response_data['meta']['status'] == 'failure' - assert f'IKB with ID {fake_id} not found' in response_data['meta']['error'] - - -class TestIKBUpdateOperations: - """Test UPDATE operations (adding images to IKB)""" - - def 
test_add_image_to_ikb_success( - self, test_client, mock_containers, sample_image_data, mock_ikb_info - ): - """Test successful image addition to IKB""" - db_repo_container, common_container, image_search_container = mock_containers - - # Mock the IKB service - mock_ikb_service = Mock() - mock_result = { - 'status': 'success', - 'reference_id': str(uuid4()), - 'message': 'Image added successfully', - } - mock_ikb_service.add_image_to_ikb = AsyncMock(return_value=mock_result) - image_search_container.ikb_service.override(mock_ikb_service) - - payload = { - 'image_data': sample_image_data, - 'reference_id': 'test_ref_123', - 'metadata': {'source': 'test'}, - } - - response = test_client.post( - f'/floware/ikb/{mock_ikb_info.ikb_id}/add', json=payload - ) - - assert response.status_code == 201 - response_data = response.json() - assert response_data['meta']['status'] == 'success' - assert response_data['data']['status'] == 'success' - - def test_add_image_to_ikb_invalid_image_data(self, test_client, mock_ikb_info): - """Test image addition with invalid image data""" - invalid_payload = { - 'image_data': 'invalid_base64_data', # Invalid format - 'reference_id': 'test_ref_123', - } - - response = test_client.post( - f'/floware/ikb/{mock_ikb_info.ikb_id}/add', json=invalid_payload - ) - assert response.status_code == 422 # Validation error - - def test_search_in_ikb_success( - self, test_client, mock_containers, sample_image_data, mock_ikb_info - ): - """Test successful image search in IKB""" - db_repo_container, common_container, image_search_container = mock_containers - - # Mock the IKB service - mock_ikb_service = Mock() - mock_search_response = MockIKBSearchResponse( - query_id=str(uuid4()), - ikb_id=mock_ikb_info.ikb_id, - ikb_name=mock_ikb_info.name, - algorithm_used='sift', - matches=[ - { - 'reference_id': 'ref_1', - 'match_score': 0.95, - 'confidence': 0.9, - 'metadata': {}, - } - ], - total_images_searched=10, - processing_time_ms=150.5, - ) - mock_ikb_service.search_in_ikb = AsyncMock(return_value=mock_search_response) - image_search_container.ikb_service.override(mock_ikb_service) - - payload = { - 'ikb_id': mock_ikb_info.ikb_id, - 'image_data': sample_image_data, - 'max_results': 5, - 'threshold': 0.8, - } - - response = test_client.post( - f'/floware/ikb/{mock_ikb_info.ikb_id}/search', json=payload - ) - - assert response.status_code == 200 - response_data = response.json() - assert response_data['meta']['status'] == 'success' - assert response_data['data']['ikb_id'] == mock_ikb_info.ikb_id - assert response_data['data']['algorithm_used'] == 'sift' - assert len(response_data['data']['matches']) == 1 - - def test_search_in_ikb_invalid_image_data(self, test_client, mock_ikb_info): - """Test image search with invalid image data""" - invalid_payload = { - 'ikb_id': mock_ikb_info.ikb_id, - 'image_data': 'invalid_base64_data', # Invalid format - 'max_results': 5, - } - - response = test_client.post( - f'/floware/ikb/{mock_ikb_info.ikb_id}/search', json=invalid_payload - ) - assert response.status_code == 422 # Validation error - - def test_search_in_ikb_invalid_max_results( - self, test_client, sample_image_data, mock_ikb_info - ): - """Test image search with invalid max_results parameter""" - invalid_payload = { - 'ikb_id': mock_ikb_info.ikb_id, - 'image_data': sample_image_data, - 'max_results': 150, # Exceeds maximum of 100 - } - - response = test_client.post( - f'/floware/ikb/{mock_ikb_info.ikb_id}/search', json=invalid_payload - ) - assert response.status_code == 422 # Validation 
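The 422 assertions above imply field-level constraints on the request models, which are not shown in this patch. A plausible reconstruction of the search payload under pydantic v2, with the bounds these tests exercise (the exact field set and defaults are assumptions):

    from typing import Optional

    from pydantic import BaseModel, Field

    class IKBSearchRequest(BaseModel):  # sketch; the real model lives in ikb_models.py
        ikb_id: str
        image_data: str = Field(pattern=r'^data:image/\w+;base64,')  # rejects bare base64
        max_results: int = Field(default=5, ge=1, le=100)            # 150 -> 422
        threshold: Optional[float] = Field(default=None, ge=0.0, le=1.0)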
error - - -class TestIKBDeleteEndpoint: - """Test DELETE operations""" - - def test_delete_ikb_success(self, test_client, mock_containers, mock_ikb_info): - """Test successful IKB deletion""" - db_repo_container, common_container, image_search_container = mock_containers - - # Mock the IKB service - mock_ikb_service = Mock() - mock_ikb_service.delete_ikb = AsyncMock(return_value=True) - image_search_container.ikb_service.override(mock_ikb_service) - - response = test_client.delete(f'/floware/ikb/{mock_ikb_info.ikb_id}') - - assert response.status_code == 200 - response_data = response.json() - assert response_data['meta']['status'] == 'success' - assert 'deleted successfully' in response_data['data']['message'] - - def test_delete_ikb_not_found(self, test_client, mock_containers): - """Test IKB deletion when IKB doesn't exist""" - db_repo_container, common_container, image_search_container = mock_containers - - # Mock the IKB service to return False (not found) - mock_ikb_service = Mock() - mock_ikb_service.delete_ikb = AsyncMock(return_value=False) - image_search_container.ikb_service.override(mock_ikb_service) - - fake_id = str(uuid4()) - response = test_client.delete(f'/floware/ikb/{fake_id}') - - assert response.status_code == 404 - response_data = response.json() - assert response_data['meta']['status'] == 'failure' - assert f'IKB with ID {fake_id} not found' in response_data['meta']['error'] - - -class TestEndpointIntegration: - """Integration tests for complete workflows""" - - def test_complete_ikb_lifecycle( - self, test_client, mock_containers, sample_ikb_data, sample_image_data - ): - """Test complete IKB lifecycle: create -> add image -> search -> delete""" - db_repo_container, common_container, image_search_container = mock_containers - - # Create a mock IKB info that serializes properly - mock_ikb_info = MockIKBInfo( - ikb_id=str(uuid4()), - name=sample_ikb_data['name'], - description=sample_ikb_data['description'], - ikb_type=IKBType.GOLD_MATCHING, - algorithm_type=AlgorithmType.SIFT, - status=IKBStatus.ACTIVE, - image_count=0, - created_at=datetime.now(), - updated_at=datetime.now(), - config=sample_ikb_data['config'], - ) - - # Mock the IKB service for all operations - mock_ikb_service = Mock() - mock_ikb_service.create_ikb = AsyncMock(return_value=mock_ikb_info) - mock_ikb_service.add_image_to_ikb = AsyncMock( - return_value={'status': 'success', 'reference_id': 'ref_123'} - ) - mock_ikb_service.search_in_ikb = AsyncMock( - return_value=MockIKBSearchResponse( - query_id=str(uuid4()), - ikb_id=mock_ikb_info.ikb_id, - ikb_name=mock_ikb_info.name, - algorithm_used='sift', - matches=[], - total_images_searched=1, - processing_time_ms=100.0, - ) - ) - mock_ikb_service.delete_ikb = AsyncMock(return_value=True) - - image_search_container.ikb_service.override(mock_ikb_service) - - # 1. Create IKB - create_response = test_client.post('/floware/ikb/create', json=sample_ikb_data) - assert create_response.status_code == 201 - created_ikb = create_response.json()['data'] - ikb_id = created_ikb['ikb_id'] - - # 2. Add image to IKB - add_payload = { - 'ikb_id': ikb_id, - 'image_data': sample_image_data, - 'reference_id': 'test_ref_123', - } - add_response = test_client.post(f'/floware/ikb/{ikb_id}/add', json=add_payload) - assert add_response.status_code == 201 - - # 3. 
Search in IKB - search_payload = { - 'ikb_id': ikb_id, - 'image_data': sample_image_data, - 'max_results': 5, - } - search_response = test_client.post( - f'/floware/ikb/{ikb_id}/search', json=search_payload - ) - assert search_response.status_code == 200 - - # 4. Delete IKB - delete_response = test_client.delete(f'/floware/ikb/{ikb_id}') - assert delete_response.status_code == 200 - - def test_error_handling_consistency(self, test_client, mock_containers): - """Test that error responses are consistent across endpoints""" - db_repo_container, common_container, image_search_container = mock_containers - - # Mock service to raise an exception - mock_ikb_service = Mock() - mock_ikb_service.get_ikb = AsyncMock(side_effect=Exception('Database error')) - image_search_container.ikb_service.override(mock_ikb_service) - - fake_id = str(uuid4()) - - # Since the controller doesn't handle exceptions, this will result in a 500 error - # We need to catch the exception that will be raised by the test client - with pytest.raises(Exception) as exc_info: - test_client.get(f'/floware/ikb/{fake_id}') - - # Verify that the exception contains our expected error message - assert 'Database error' in str(exc_info.value) - - -if __name__ == '__main__': - pytest.main([__file__, '-v']) diff --git a/wavefront/server/modules/image_search_module/tests/test_ikb_create_upload.py b/wavefront/server/modules/image_search_module/tests/test_ikb_create_upload.py deleted file mode 100644 index 9541bfe2..00000000 --- a/wavefront/server/modules/image_search_module/tests/test_ikb_create_upload.py +++ /dev/null @@ -1,334 +0,0 @@ -# import asyncio -# import sys -# import base64 -# from pathlib import Path -# import pytest - - -# from image_search_module.services.ikb_service import IKBService -# from image_search_module.services.image_matching_service import ImageMatchingService -# from image_search_module.services.reference_image_service import ReferenceImageService -# from image_search_module.services.algorithm_service import AlgorithmService -# from image_search_module.services.algorithm_factory import AlgorithmFactory -# from image_search_module.repositories.sift_features_repository import ( -# SIFTFeaturesRepository, -# ) -# from image_search_module.repositories.ikb_repository import IKBRepository -# from image_search_module.algorithms.base import AlgorithmType - -# # from .db_setup import setup_test_database -# from image_search_module.models.ikb_models import ( -# CreateIKBRequest, -# IKBImageAddRequest, -# IKBType, -# IKBStatus, -# ) -# from db_repo_module.repositories.sql_alchemy_repository import SQLAlchemyRepository -# from db_repo_module.models.image_search_models import ( -# ReferenceImageFeatures, -# SIFTFeatures, -# ) -# from db_repo_module.models.ikb_models import ImageKnowledgeBase - -# import logging - - -# logging.basicConfig(level=logging.INFO) -# logger = logging.getLogger(__name__) - - -# class MockCloudStorage: -# """Mock cloud storage for testing""" - -# def __init__(self): -# self.files = {} - -# async def save_file(self, file_path: str, file_data: bytes) -> str: -# """Save file and return URL""" -# self.files[file_path] = file_data -# return f'mock://storage/{file_path}' - -# async def save_small_file( -# self, file_content: bytes, bucket_name: str, key: str -# ) -> str: -# """Save small file and return URL - matches the expected signature""" -# self.files[key] = file_content -# return f'https://mock-bucket.com/{key}' - - -# async def setup_services(): -# """Set up all required services for testing""" -# 
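test_error_handling_consistency above has to wrap the request in pytest.raises because TestClient re-raises unhandled server exceptions by default. Starlette's client exposes a flag that turns those into real 500 responses instead, which would keep the assertion style consistent with the other endpoint tests; a sketch:

    from fastapi.testclient import TestClient

    # raise_server_exceptions=False makes unhandled errors come back as HTTP 500
    client = TestClient(app, raise_server_exceptions=False)  # 'app' as in the test_app fixture
    response = client.get(f'/floware/ikb/{fake_id}')
    assert response.status_code == 500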
logger.info('Setting up services...') - -# # Setup database with tables -# db_client = await setup_test_database() - -# # Create repositories -# features_repository = SQLAlchemyRepository(ReferenceImageFeatures, db_client) -# sift_features_repository = SIFTFeaturesRepository(SIFTFeatures, db_client) - -# # Create IKB repository -# ikb_repository_db = SQLAlchemyRepository(ImageKnowledgeBase, db_client) -# ikb_repository = IKBRepository(ikb_repository_db) - -# # Create services -# algorithm_factory = AlgorithmFactory() -# algorithm_service = AlgorithmService(algorithm_factory) -# cloud_storage = MockCloudStorage() -# reference_image_service = ReferenceImageService( -# cloud_storage_manager=cloud_storage, -# features_repository=features_repository, -# sift_features_repository=sift_features_repository, -# algorithm_service=algorithm_service, -# bucket_name='test-bucket', -# ) -# image_matching_service = ImageMatchingService( -# algorithm_factory=algorithm_factory, -# reference_service=reference_image_service, -# active_algorithm_type=AlgorithmType.SIFT, -# algorithm_config={'sift': {'max_features': 1000}}, -# ) -# ikb_service = IKBService( -# image_matching_service=image_matching_service, -# reference_image_service=reference_image_service, -# ikb_repository=ikb_repository, -# ) - -# return ikb_service, db_client - - -# def image_to_base64_data_url(image_path: str) -> str: -# """Convert image file to base64 data URL""" -# with open(image_path, 'rb') as image_file: -# image_data = image_file.read() -# base64_data = base64.b64encode(image_data).decode('utf-8') -# return f'data:image/png;base64,{base64_data}' - - -# @pytest.mark.skip(reason='Skipping') -# async def test_create_ikb_and_add_images(): -# """Test creating an IKB and adding multiple images to it""" -# logger.info('🧪 Test: Create IKB and add Images') - -# ikb_service, db_client = await setup_services() - -# try: -# # Step 1: Create IKB -# logger.info(' Step 1: Creating IKB...') -# create_request = CreateIKBRequest( -# name='Gold Image Matching IKB', -# description='Test IKB for gold image matching and analysis', -# ikb_type=IKBType.GOLD_MATCHING, -# algorithm_type=AlgorithmType.SIFT, -# config={'threshold': 0.8, 'max_features': 1000}, -# ) - -# ikb_info = await ikb_service.create_ikb(create_request) -# logger.info(f'✅ IKB created: {ikb_info.ikb_id}') -# logger.info(f' Name: {ikb_info.name}') -# logger.info(f' Type: {ikb_info.ikb_type}') -# logger.info(f' Algorithm: {ikb_info.algorithm_type}') -# logger.info(f' Status: {ikb_info.status}') -# logger.info(f' Image Count: {ikb_info.image_count}') - -# # Step 2: add multiple images using real test images -# logger.info('📤 Step 2: adding images...') -# test_images_dir = Path(__file__).parent / 'test_images' - -# # Use the actual test images -# test_images = [ -# {'name': 'image1.png', 'description': 'Test image 1'}, -# {'name': 'image2.png', 'description': 'Test image 2'}, -# {'name': 'image3.png', 'description': 'Test image 3'}, -# ] - -# added_images = [] -# for i, img_info in enumerate(test_images, 1): -# logger.info(f" adding image {i}/3: {img_info['name']}") - -# # Get the full path to the test image -# image_path = test_images_dir / img_info['name'] - -# add_request = IKBImageAddRequest( -# ikb_id=ikb_info.ikb_id, -# image_data=image_to_base64_data_url(str(image_path)), -# reference_id=f'test_image_{i}', -# metadata={ -# 'description': img_info['description'], -# 'image_file': img_info['name'], -# }, -# ) - -# result = await ikb_service.add_image_to_ikb(add_request) -# 
added_images.append(result) -# logger.info(f" ✅ added: {result['reference_id']}") - -# # Step 3: Verify IKB properties -# logger.info('🔍 Step 3: Verifying IKB properties...') -# updated_ikb = await ikb_service.get_ikb(ikb_info.ikb_id) - -# assert updated_ikb is not None, 'IKB should exist' -# assert ( -# updated_ikb.image_count == 3 -# ), f'Expected 3 images, got {updated_ikb.image_count}' -# assert ( -# updated_ikb.status == IKBStatus.ACTIVE -# ), f'Expected ACTIVE status, got {updated_ikb.status}' - -# logger.info('✅ IKB verification passed:') -# logger.info(f' - Image count: {updated_ikb.image_count}') -# logger.info(f' - Status: {updated_ikb.status}') -# logger.info(f' - Created at: {updated_ikb.created_at}') -# logger.info(f' - Updated at: {updated_ikb.updated_at}') - -# # Step 4: List all IKBs -# logger.info('📋 Step 4: Listing all IKBs...') -# all_ikbs = await ikb_service.list_ikbs() -# logger.info(f' Found {len(all_ikbs)} IKB(s)') -# for ikb in all_ikbs: -# logger.info(f' - {ikb.name} ({ikb.ikb_id}): {ikb.image_count} images') - -# logger.info('🎉 Test completed successfully!') -# return ikb_info.ikb_id, added_images - -# except Exception as e: -# logger.error(f'❌ Test failed: {e}') -# import traceback - -# traceback.print_exc() -# raise -# finally: -# await db_client.close() - - -# @pytest.mark.skip(reason='Skipping') -# async def test_ikb_search_with_query_image(): -# """Test searching within an IKB using the query image""" -# logger.info(' Test: IKB Search with Query Image') - -# ikb_service, db_client = await setup_services() - -# try: -# # Step 1: Create IKB -# logger.info(' Step 1: Creating IKB...') -# create_request = CreateIKBRequest( -# name='Photo Matching IKB', -# description='Test IKB for photo matching and similarity search', -# ikb_type=IKBType.PHOTO_MATCHING, -# algorithm_type=AlgorithmType.SIFT, -# config={'threshold': 0.7, 'max_features': 1000}, -# ) - -# ikb_info = await ikb_service.create_ikb(create_request) -# logger.info(f'✅ Created IKB: {ikb_info.name} (ID: {ikb_info.ikb_id})') - -# # Step 2: add reference images using real test images -# logger.info('📤 Step 2: adding reference images...') -# test_images_dir = Path(__file__).parent / 'test_images' - -# # add the reference images -# reference_images = [] -# for i, image_name in enumerate(['image1.png', 'image2.png', 'image3.png'], 1): -# image_path = test_images_dir / image_name - -# add_request = IKBImageAddRequest( -# ikb_id=ikb_info.ikb_id, -# image_data=image_to_base64_data_url(str(image_path)), -# reference_id=f'ref-photo-{i:03d}', -# metadata={'category': f'photo_{i}', 'add_order': i}, -# ) - -# add_result = await ikb_service.add_image_to_ikb(add_request) -# reference_images.append(add_result['reference_id']) -# logger.info(f"✅ added reference {i}: {add_result['reference_id']}") - -# # Verify all images added -# updated_ikb = await ikb_service.get_ikb(ikb_info.ikb_id) -# assert updated_ikb.image_count == 3 -# logger.info(f'✅ IKB has {updated_ikb.image_count} reference images') - -# # Step 3: Search with query image -# logger.info('🔍 Step 3: Searching with query image...') -# query_image_path = test_images_dir / 'query.png' - -# from image_search_module.models.ikb_models import IKBSearchRequest - -# search_request = IKBSearchRequest( -# ikb_id=ikb_info.ikb_id, -# image_data=image_to_base64_data_url(str(query_image_path)), -# max_results=5, -# threshold=0.6, -# ) - -# search_result = await ikb_service.search_in_ikb(search_request) -# 
logger.info(f'✅ Search completed: {len(search_result.matches)} matches found') - -# # Verify search results -# assert search_result.ikb_id == ikb_info.ikb_id -# assert search_result.ikb_name == ikb_info.name -# assert search_result.algorithm_used == 'sift' -# assert search_result.total_images_searched == 3 -# assert len(search_result.matches) > 0 - -# # Step 4: Analyze search results -# logger.info(' Step 4: Analyzing search results...') -# logger.info(f' Query ID: {search_result.query_id}') -# logger.info(f' IKB: {search_result.ikb_name}') -# logger.info(f' Algorithm: {search_result.algorithm_used}') -# logger.info(f' Total images searched: {search_result.total_images_searched}') -# logger.info(f' Processing time: {search_result.processing_time_ms:.2f}ms') -# logger.info(f' Matches found: {len(search_result.matches)}') - -# # Log detailed match information -# for i, match in enumerate(search_result.matches): -# logger.info(f' Match {i+1}:') -# logger.info(f" - Reference ID: {match['reference_id']}") -# logger.info(f" - Match Score: {match['match_score']:.4f}") -# logger.info(f" - Is Match: {match['is_match']}") -# logger.info(f" - Confidence: {match['confidence']:.4f}") -# logger.info(f" - Processing Time: {match['processing_time_ms']:.2f}ms") - -# logger.info(' Search test completed successfully!') -# logger.info('📊 Summary:') -# logger.info(f' - IKB: {ikb_info.name}') -# logger.info(f' - Reference Images: {len(reference_images)}') -# logger.info(' - Query Image: query.png') -# logger.info(f' - Matches Found: {len(search_result.matches)}') -# logger.info( -# f" - Best Match Score: {max(match['match_score'] for match in search_result.matches):.4f}" -# ) - -# return ikb_info.ikb_id, search_result - -# except Exception as e: -# logger.error(f'❌ Test failed: {e}') -# import traceback - -# traceback.print_exc() -# raise -# finally: -# await db_client.close() - - -# async def main(): -# """Main test function""" -# try: -# # Test 1: Create IKB and add images -# # logger.info('🚀 Starting IKB Create and add Test') -# # ikb_id, added_images = await test_create_ikb_and_add_images() -# # logger.info(f'✅ Create/add test passed! IKB ID: {ikb_id}') -# # logger.info(f'✅ added {len(added_images)} images') - -# # Test 2: Search with query image -# logger.info('\n🚀 Starting IKB Search Test') -# search_ikb_id, search_result = await test_ikb_search_with_query_image() -# logger.info(f'✅ Search test passed! 
IKB ID: {search_ikb_id}') -# logger.info(f'✅ Found {len(search_result.matches)} matches') - -# except Exception as e: -# logger.error(f'❌ Test failed: {e}') -# sys.exit(1) - - -# if __name__ == '__main__': -# asyncio.run(main()) diff --git a/wavefront/server/modules/image_search_module/tests/test_image_controller.py b/wavefront/server/modules/image_search_module/tests/test_image_controller.py deleted file mode 100644 index 585df502..00000000 --- a/wavefront/server/modules/image_search_module/tests/test_image_controller.py +++ /dev/null @@ -1,203 +0,0 @@ -""" -Simple test to verify image search module wiring without complex dependencies -""" - -import pytest -from unittest.mock import Mock, AsyncMock, MagicMock -from fastapi import FastAPI -from fastapi.testclient import TestClient - -from image_search_module.controllers.image_search_controller import image_search_router -from image_search_module.image_search_container import ImageSearchContainer -from image_search_module.algorithms.base import AlgorithmType -from common_module.common_container import CommonContainer -from db_repo_module.db_repo_container import DatabaseModuleContainer - - -class MockDbClient: - def __init__(self): - # Create a mock session factory - self.session = MagicMock() - # Mock the async context manager behavior - mock_session = MagicMock() - mock_session.add = Mock() - mock_session.commit = AsyncMock() - mock_session.refresh = AsyncMock() - mock_session.query = Mock() - mock_session.get = AsyncMock() - - self.session.return_value.__aenter__ = AsyncMock(return_value=mock_session) - self.session.return_value.__aexit__ = AsyncMock(return_value=None) - - -@pytest.fixture -def mock_containers(): - """Setup mock containers for testing""" - # Mock database container - db_repo_container = DatabaseModuleContainer() - mock_db_client = MockDbClient() - db_repo_container.db_client.override(mock_db_client) - - # Mock common container - common_container = CommonContainer() - mock_cache_manager = Mock() - mock_cache_manager.get_str.return_value = ( - '{"user_id": "test_user", "session_id": "test_session"}' - ) - mock_cache_manager.add = Mock() - common_container.cache_manager.override(mock_cache_manager) - - # Mock image search container - mock_cloud_storage_manager = Mock() - mock_cloud_storage_manager.save_file = AsyncMock( - return_value='mock://storage/test.jpg' - ) - mock_cloud_storage_manager.get_file = AsyncMock(return_value=b'mock_data') - - image_search_container = ImageSearchContainer( - db_client=mock_db_client, - cloud_storage_manager=mock_cloud_storage_manager, - ) - - # Override the problematic providers directly - image_search_container.active_algorithm_type.override(AlgorithmType.SIFT) - - # Mock the repositories with proper async methods and correct return types - mock_ikb_repository = Mock() - # Return an empty list for list_ikbs so endpoints have something serializable - mock_ikb_repository.list_ikbs = AsyncMock(return_value=[]) - mock_ikb_repository.get_ikb = AsyncMock(return_value=None) - mock_ikb_repository.create_ikb = AsyncMock(return_value=Mock()) - mock_ikb_repository.delete_ikb = AsyncMock(return_value=True) - - mock_reference_features_repository = Mock() - mock_reference_features_repository.create = AsyncMock(return_value=Mock()) - mock_reference_features_repository.get = AsyncMock(return_value=None) - - mock_sift_features_repository = Mock() - mock_sift_features_repository.create = AsyncMock(return_value=Mock()) - mock_sift_features_repository.get = AsyncMock(return_value=None) - - # Override the repository providers - 
-    image_search_container.ikb_repository.override(mock_ikb_repository)
-    image_search_container.reference_features_repository.override(
-        mock_reference_features_repository
-    )
-    image_search_container.sift_features_repository.override(
-        mock_sift_features_repository
-    )
-
-    # Mock the services that depend on config
-    mock_algorithm_factory = Mock()
-    mock_algorithm_service = Mock()
-    mock_reference_image_service = Mock()
-    mock_reference_image_service.add_image_to_ikb = AsyncMock(
-        return_value={'status': 'success'}
-    )
-    mock_reference_image_service.search_in_ikb = AsyncMock(return_value=Mock())
-
-    mock_image_matching_service = Mock()
-
-    image_search_container.algorithm_factory.override(mock_algorithm_factory)
-    image_search_container.algorithm_service.override(mock_algorithm_service)
-    image_search_container.reference_image_service.override(
-        mock_reference_image_service
-    )
-    image_search_container.image_matching_service.override(mock_image_matching_service)
-
-    # Wire containers
-    common_container.wire(packages=['image_search_module.controllers'])
-    image_search_container.wire(packages=['image_search_module.controllers'])
-
-    yield db_repo_container, common_container, image_search_container
-
-    # Cleanup
-    common_container.unwire()
-    image_search_container.unwire()
-
-
-@pytest.fixture
-def test_app(mock_containers):
-    """Create test FastAPI app"""
-    app = FastAPI()
-    app.include_router(image_search_router, prefix='/floware')
-    return app
-
-
-@pytest.fixture
-def test_client(test_app):
-    """Create test client"""
-    return TestClient(test_app)
-
-
-def test_app_creation(test_app):
-    """Test that the FastAPI app can be created with the router"""
-    assert test_app is not None
-    # Check that routes are registered
-    routes = [route.path for route in test_app.routes]
-    assert '/floware/ikb/' in routes
-    assert '/floware/ikb/create' in routes
-
-
-def test_router_inclusion(test_app):
-    """Test that the image search router is properly included"""
-    # Check that the router is included
-    assert len(test_app.routes) > 0
-
-    # Check for specific routes
-    route_paths = [route.path for route in test_app.routes if hasattr(route, 'path')]
-    expected_paths = [
-        '/floware/ikb/',
-        '/floware/ikb/create',
-        '/floware/ikb/{ikb_id}',
-        '/floware/ikb/{ikb_id}/add',
-        '/floware/ikb/{ikb_id}/search',
-    ]
-
-    for expected_path in expected_paths:
-        assert any(
-            expected_path in path for path in route_paths
-        ), f'Route {expected_path} not found'
-
-
-def test_container_wiring(mock_containers):
-    """Test that containers can be wired without errors"""
-    db_repo_container, common_container, image_search_container = mock_containers
-
-    # Test that containers are properly set up
-    assert db_repo_container is not None
-    assert common_container is not None
-    assert image_search_container is not None
-
-    # Test that services can be accessed
-    try:
-        ikb_service = image_search_container.ikb_service()
-        assert ikb_service is not None
-        print('✅ IKB service created successfully')
-    except Exception as e:
-        pytest.fail(f'Failed to get ikb_service: {e}')
-
-
-def test_basic_endpoint_access(test_client):
-    """Test that endpoints are accessible (even if they return errors)"""
-    # Test GET /floware/ikb/ - should return some response (not 404)
-    response = test_client.get('/floware/ikb/')
-    print(f'GET /floware/ikb/ status: {response.status_code}')
-    # Should not be 404 (route not found)
-    assert response.status_code != 404, 'Route not found - wiring issue'
-
-    # Test POST /floware/ikb/create - should return some response (not 404)
-    response = test_client.post('/floware/ikb/create', json={})
-    print(f'POST /floware/ikb/create status: {response.status_code}')
-    # Should not be 404 (route not found)
-    assert response.status_code != 404, 'Route not found - wiring issue'
-
-
-def test_invalid_endpoint_returns_404(test_client):
-    """Test that invalid endpoints return 404"""
-    response = test_client.get('/floware/invalid-endpoint')
-    assert response.status_code == 404
-
-
-if __name__ == '__main__':
-    pytest.main([__file__, '-v'])
diff --git a/wavefront/server/modules/image_search_module/tests/test_images/local_search.sh b/wavefront/server/modules/image_search_module/tests/test_images/local_search.sh
deleted file mode 100755
index 1d306079..00000000
--- a/wavefront/server/modules/image_search_module/tests/test_images/local_search.sh
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/bin/bash
-
-# Simple Image Search Test Script (No Authentication)
-
-set -e  # Exit on any error
-
-# Configuration
-BASE_URL="http://0.0.0.0:8001"
-IMAGE_FILE="query.png"
-IKB_ID="fc562847-e2cf-4f6e-bd1e-708a7a3be8f8"  # Replace with your IKB ID
-
-# Colors for output
-RED='\033[0;31m'
-GREEN='\033[0;32m'
-YELLOW='\033[1;33m'
-BLUE='\033[0;34m'
-NC='\033[0m'  # No Color
-
-echo -e "${BLUE}=== Image Search Test (No Auth) ===${NC}"
-echo "Base URL: $BASE_URL"
-echo "Image File: $IMAGE_FILE"
-echo ""
-
-# Step 1: Check if image file exists
-if [ ! -f "$IMAGE_FILE" ]; then
-    echo -e "${RED}Error: Image file '$IMAGE_FILE' not found${NC}"
-    echo "Available files in test_images directory:"
-    ls -la "modules/image_search_module/tests/test_images/" 2>/dev/null || echo "Directory not found"
-    exit 1
-fi
-
-echo -e "${GREEN}✓ Image file found${NC}"
-echo ""
-
-# Step 2: Prepare image data
-echo -e "${YELLOW}Step 1: Preparing image data...${NC}"
-base64 -i "$IMAGE_FILE" | tr -d '\n' > /tmp/image_base64.txt
-echo -e "${GREEN}✓ Image converted to base64 ($(wc -c < /tmp/image_base64.txt) characters)${NC}"
-echo ""
-
-# Step 3: Add image to IKB
-echo -e "${YELLOW}Step 2: Adding image to IKB...${NC}"
-
-# Create request payload for adding image
-cat > /tmp/add_payload.json << EOF
-{
-  "image_data": "data:image/png;base64,$(cat /tmp/image_base64.txt)",
-  "reference_id": "test_reference_$(date +%s)",
-  "metadata": {
-    "description": "Test image for no-auth script",
-    "category": "test",
-    "source": "no_auth_test",
-    "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
-  }
-}
-EOF
-
-echo "Request payload size: $(wc -c < /tmp/add_payload.json) characters"
-
-ADD_RESPONSE=$(curl -s -X POST \
-    -H "Content-Type: application/json" \
-    -d @/tmp/add_payload.json \
-    "$BASE_URL/floware/ikb/$IKB_ID/add")
-
-echo "Add Image Response: $ADD_RESPONSE"
-echo ""
-
-# Check if add was successful
-ADD_STATUS=$(echo "$ADD_RESPONSE" | jq -r '.meta.status // "unknown"')
-if [ "$ADD_STATUS" = "success" ]; then
-    echo -e "${GREEN}✓ Successfully added image to IKB${NC}"
-
-    # Extract reference ID for verification
-    REFERENCE_ID=$(echo "$ADD_RESPONSE" | jq -r '.data.reference_id // "unknown"')
-    echo "Reference ID: $REFERENCE_ID"
-else
-    echo -e "${RED}✗ Failed to add image to IKB${NC}"
-    echo "Error: $(echo "$ADD_RESPONSE" | jq -r '.meta.error // "Unknown error"')"
-    # Continue to search anyway to test the search endpoint
-fi
-
-echo ""
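-# The search below reuses the exact base64 payload that was just added, so
-# at least one high-similarity match is expected if ingestion worked.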
"max_results": 5, - "threshold": 0.7 -} -EOF - -echo "Search payload size: $(wc -c < /tmp/search_payload.json) characters" - -SEARCH_RESPONSE=$(curl -s -X POST \ - -H "Content-Type: application/json" \ - -d @/tmp/search_payload.json \ - "$BASE_URL/floware/ikb/$IKB_ID/search") - -echo "Search Response: $SEARCH_RESPONSE" -echo "" - -# Check search results -SEARCH_STATUS=$(echo "$SEARCH_RESPONSE" | jq -r '.meta.status // "unknown"') -if [ "$SEARCH_STATUS" = "success" ]; then - echo -e "${GREEN}โœ“ Search completed successfully${NC}" - - # Extract and display match count - MATCH_COUNT=$(echo "$SEARCH_RESPONSE" | jq -r '.data.matches | length // 0') - echo "Number of matches found: $MATCH_COUNT" - - if [ "$MATCH_COUNT" -gt 0 ]; then - echo -e "${GREEN}โœ“ Found matching images!${NC}" - - # Display match details - echo "Match details:" - echo "$SEARCH_RESPONSE" | jq '.data.matches[] | {reference_id: .reference_id, match_score: .match_score, confidence: .confidence, is_match: .is_match}' - - # Check if our added image is in the results - if [ -n "$REFERENCE_ID" ] && [ "$REFERENCE_ID" != "unknown" ]; then - FOUND_OUR_IMAGE=$(echo "$SEARCH_RESPONSE" | jq -r --arg ref_id "$REFERENCE_ID" '.data.matches[] | select(.reference_id == $ref_id) | .reference_id // empty') - if [ -n "$FOUND_OUR_IMAGE" ]; then - echo -e "${GREEN}โœ“ Our added image was found in search results!${NC}" - else - echo -e "${YELLOW}โš  Our added image was not found in search results${NC}" - fi - fi - else - echo -e "${YELLOW}โš  No matches found${NC}" - fi -else - echo -e "${RED}โœ— Search failed${NC}" - echo "Error: $(echo "$SEARCH_RESPONSE" | jq -r '.meta.error // "Unknown error"')" -fi - -echo "" - -# Step 5: Test IKB info endpoint -echo -e "${YELLOW}Step 4: Testing IKB info endpoint...${NC}" -IKB_INFO_RESPONSE=$(curl -s -X GET "$BASE_URL/floware/ikb/$IKB_ID") -echo "IKB Info Response: $IKB_INFO_RESPONSE" - -# Clean up temporary files -rm -f /tmp/image_base64.txt /tmp/add_payload.json /tmp/search_payload.json - -echo "" -echo -e "${BLUE}=== Test Complete ===${NC}" diff --git a/wavefront/server/modules/image_search_module/tests/test_images/staging_search.sh b/wavefront/server/modules/image_search_module/tests/test_images/staging_search.sh deleted file mode 100755 index f059b66a..00000000 --- a/wavefront/server/modules/image_search_module/tests/test_images/staging_search.sh +++ /dev/null @@ -1,208 +0,0 @@ -#!/bin/bash - -# Image Search API Testing Script for Staging -# Usage: ./test_image_search_staging.sh - -set -e # Exit on any error - -# Configuration -STAGING_BASE_URL="https://staging.rootflo.ai" -AUTH_EMAIL="" # Replace with your email -AUTH_PASSWORD="" # Replace with your password -IMAGE_FILE="image1.png" # Replace with your image file path -IKB_ID="fc562847-e2cf-4f6e-bd1e-708a7a3be8f8" # Replace with your IKB ID - -# Colors for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -echo -e "${BLUE}=== Image Search API Testing Script for Staging ===${NC}" -echo "Base URL: $STAGING_BASE_URL" -echo "" - -# Function to make authenticated requests -make_authenticated_request() { - local method=$1 - local endpoint=$2 - local data=$3 - local content_type=${4:-"application/json"} - - if [ -z "$BEARER_TOKEN" ]; then - echo -e "${RED}Error: No bearer token available${NC}" - return 1 - fi - - if [ -n "$data" ]; then - # Check if data is too large for command line (roughly > 1MB) - if [ ${#data} -gt 1000000 ]; then - # Use temporary file for large payloads - local 
-# Function to make authenticated requests
-make_authenticated_request() {
-    local method=$1
-    local endpoint=$2
-    local data=$3
-    local content_type=${4:-"application/json"}
-
-    if [ -z "$BEARER_TOKEN" ]; then
-        echo -e "${RED}Error: No bearer token available${NC}"
-        return 1
-    fi
-
-    if [ -n "$data" ]; then
-        # Check if data is too large for command line (roughly > 1MB)
-        if [ ${#data} -gt 1000000 ]; then
-            # Use temporary file for large payloads
-            local temp_file=$(mktemp)
-            echo "$data" > "$temp_file"
-            curl -s -X "$method" \
-                -H "Content-Type: $content_type" \
-                -H "Authorization: Bearer $BEARER_TOKEN" \
-                -d @"$temp_file" \
-                "$STAGING_BASE_URL$endpoint"
-            rm -f "$temp_file"
-        else
-            # Use direct data for small payloads
-            curl -s -X "$method" \
-                -H "Content-Type: $content_type" \
-                -H "Authorization: Bearer $BEARER_TOKEN" \
-                -d "$data" \
-                "$STAGING_BASE_URL$endpoint"
-        fi
-    else
-        curl -s -X "$method" \
-            -H "Authorization: Bearer $BEARER_TOKEN" \
-            "$STAGING_BASE_URL$endpoint"
-    fi
-}
-
-# Step 1: Authenticate and get bearer token
-echo -e "${YELLOW}Step 1: Authenticating...${NC}"
-AUTH_RESPONSE=$(curl -s -X POST \
-    -H "Content-Type: application/json" \
-    -d "{\"email\": \"$AUTH_EMAIL\", \"password\": \"$AUTH_PASSWORD\"}" \
-    "$STAGING_BASE_URL/floware/v1/authenticate")
-
-echo "Auth Response: $AUTH_RESPONSE"
-
-# Extract bearer token from response
-BEARER_TOKEN=$(echo "$AUTH_RESPONSE" | jq -r '.data.user.access_token // empty')
-
-if [ -z "$BEARER_TOKEN" ] || [ "$BEARER_TOKEN" = "null" ]; then
-    echo -e "${RED}Error: Failed to get bearer token${NC}"
-    echo "Response: $AUTH_RESPONSE"
-    exit 1
-fi
-
-echo -e "${GREEN}✓ Successfully authenticated${NC}"
-echo "Bearer Token: ${BEARER_TOKEN:0:50}..."
-echo ""
-
-# Step 2: Check if image file exists
-if [ ! -f "$IMAGE_FILE" ]; then
-    echo -e "${RED}Error: Image file '$IMAGE_FILE' not found${NC}"
-    exit 1
-fi
-
-echo -e "${YELLOW}Step 2: Preparing image data...${NC}"
-
-# Convert image to base64
-BASE64_DATA=$(base64 -i "$IMAGE_FILE" | tr -d '\n')
-IMAGE_DATA_URL="data:image/png;base64,${BASE64_DATA}"
-
-echo -e "${GREEN}✓ Image converted to base64 (${#BASE64_DATA} characters)${NC}"
-echo ""
-
-# Step 3: Test IKB endpoints
-echo -e "${YELLOW}Step 3: Testing IKB endpoints...${NC}"
-
-# 3a. List all IKBs
-echo -e "${BLUE}3a. Listing all IKBs...${NC}"
-LIST_RESPONSE=$(make_authenticated_request "GET" "/floware/ikb/")
-echo "List IKB Response: $LIST_RESPONSE"
-echo ""
-
-# 3b. Get specific IKB info
-echo -e "${BLUE}3b. Getting IKB info for ID: $IKB_ID...${NC}"
-IKB_INFO_RESPONSE=$(make_authenticated_request "GET" "/floware/ikb/$IKB_ID")
-echo "IKB Info Response: $IKB_INFO_RESPONSE"
-echo ""
-
-# 3c. Add image to IKB
-echo -e "${BLUE}3c. Adding image to IKB...${NC}"
-
-# Create request payload
-REQUEST_PAYLOAD=$(cat << EOF
-{
-  "image_data": "$IMAGE_DATA_URL",
-  "reference_id": "test_reference_$(date +%s)",
-  "metadata": {
-    "description": "Test image for IKB via staging script",
-    "category": "test",
-    "source": "staging_test",
-    "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
-  }
-}
-EOF
-)
-
-echo "Request payload size: $(echo "$REQUEST_PAYLOAD" | wc -c) characters"
-
-ADD_RESPONSE=$(make_authenticated_request "POST" "/floware/ikb/$IKB_ID/add" "$REQUEST_PAYLOAD")
-echo "Add Image Response: $ADD_RESPONSE"
-echo ""
-
-# 3d. Search in IKB
-echo -e "${BLUE}3d. Searching in IKB...${NC}"
-
-SEARCH_PAYLOAD=$(cat << EOF
-{
-  "image_data": "$IMAGE_DATA_URL",
-  "max_results": 5,
-  "threshold": 0.7
-}
-EOF
-)
-
-SEARCH_RESPONSE=$(make_authenticated_request "POST" "/floware/ikb/$IKB_ID/search" "$SEARCH_PAYLOAD")
-echo "Search Response: $SEARCH_RESPONSE"
-echo ""
-
-# Step 4: Create a new IKB (optional test)
-echo -e "${YELLOW}Step 4: Testing IKB creation...${NC}"
-
-CREATE_IKB_PAYLOAD=$(cat << EOF
-{
-  "name": "Test IKB $(date +%Y%m%d_%H%M%S)",
-  "description": "Test IKB created via staging script",
-  "ikb_type": "photo_matching",
-  "algorithm_type": "sift",
-  "config": {
-    "max_keypoints": 5000,
-    "match_threshold": 0.7
-  }
-}
-EOF
-)
-
-CREATE_RESPONSE=$(make_authenticated_request "POST" "/floware/ikb/create" "$CREATE_IKB_PAYLOAD")
-echo "Create IKB Response: $CREATE_RESPONSE"
-
-# Extract new IKB ID if creation was successful
-NEW_IKB_ID=$(echo "$CREATE_RESPONSE" | jq -r '.data.ikb_id // empty')
-
-if [ -n "$NEW_IKB_ID" ] && [ "$NEW_IKB_ID" != "null" ]; then
-    echo -e "${GREEN}✓ Successfully created new IKB with ID: $NEW_IKB_ID${NC}"
-
-    # Test adding image to new IKB
-    echo -e "${BLUE}Adding image to newly created IKB...${NC}"
-    ADD_TO_NEW_RESPONSE=$(make_authenticated_request "POST" "/floware/ikb/$NEW_IKB_ID/add" "$REQUEST_PAYLOAD")
-    echo "Add to New IKB Response: $ADD_TO_NEW_RESPONSE"
-    echo ""
-
-    # Clean up: Delete the test IKB
-    echo -e "${BLUE}Cleaning up: Deleting test IKB...${NC}"
-    DELETE_RESPONSE=$(make_authenticated_request "DELETE" "/floware/ikb/$NEW_IKB_ID")
-    echo "Delete Response: $DELETE_RESPONSE"
-else
-    echo -e "${YELLOW}⚠ IKB creation may have failed or returned unexpected format${NC}"
-fi
-
-echo ""
-echo -e "${GREEN}=== Testing Complete ===${NC}"
-echo "All API endpoints have been tested successfully!"
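-# Note: under set -e, reaching this point only means no command exited
-# non-zero; per-request API failures are reported inline above, not here.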
-echo "" -echo -e "${BLUE}Summary of tested endpoints:${NC}" -echo "โœ“ POST /floware/v1/authenticate - Authentication" -echo "โœ“ GET /floware/ikb/ - List IKBs" -echo "โœ“ GET /floware/ikb/{ikb_id} - Get IKB info" -echo "โœ“ POST /floware/ikb/{ikb_id}/add - Add image to IKB" -echo "โœ“ POST /floware/ikb/{ikb_id}/search - Search in IKB" -echo "โœ“ POST /floware/ikb/create - Create new IKB" -echo "โœ“ DELETE /floware/ikb/{ikb_id} - Delete IKB" diff --git a/wavefront/server/modules/inference_module/inference_module/controllers/inference_controller.py b/wavefront/server/modules/inference_module/inference_module/controllers/inference_controller.py index af8a8b06..0325f2f5 100644 --- a/wavefront/server/modules/inference_module/inference_module/controllers/inference_controller.py +++ b/wavefront/server/modules/inference_module/inference_module/controllers/inference_controller.py @@ -17,16 +17,6 @@ inference_router = APIRouter() -async def handle_database_error(session, error_msg: str, error) -> JSONResponse: - """Handle database errors and return appropriate response.""" - await session.rollback() - logger.error(f'{error_msg} with error as {str(error)}') - return JSONResponse( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - content=ResponseFormatter.buildErrorResponse(str(error)), - ) - - @inference_router.post('/v1/model-repository/model') @inject async def model_loading( diff --git a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/knowledge_base_document_controller.py b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/knowledge_base_document_controller.py index f9260db9..c01872be 100644 --- a/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/knowledge_base_document_controller.py +++ b/wavefront/server/modules/knowledge_base_module/knowledge_base_module/controllers/knowledge_base_document_controller.py @@ -33,16 +33,6 @@ kb_document_router = APIRouter() -async def handle_database_error(session, error_msg: str, error) -> JSONResponse: - """Handle database errors and return appropriate response.""" - await session.rollback() - logger.error(f'{error_msg} with error as {str(error)}') - return JSONResponse( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - content=ResponseFormatter.buildErrorResponse(str(error)), - ) - - class KnowledgeBaseDocumentResponse(BaseModel): """Response model for knowledge base document data.""" diff --git a/wavefront/server/uv.lock b/wavefront/server/uv.lock index b5d737f3..3d973107 100644 --- a/wavefront/server/uv.lock +++ b/wavefront/server/uv.lock @@ -31,7 +31,6 @@ members = [ "floconsole", "floware", "gold-module", - "image-search-module", "inference-app", "inference-module", "insights-module", @@ -1483,7 +1482,6 @@ dependencies = [ { name = "dependency-injector" }, { name = "fastapi" }, { name = "gold-module" }, - { name = "image-search-module" }, { name = "inference-module" }, { name = "insights-module" }, { name = "knowledge-base-module" }, @@ -1509,7 +1507,6 @@ requires-dist = [ { name = "dependency-injector", specifier = ">=4.42.0,<5.0.0" }, { name = "fastapi", specifier = ">=0.115.2,<1.0.0" }, { name = "gold-module", editable = "modules/gold_module" }, - { name = "image-search-module", editable = "modules/image_search_module" }, { name = "inference-module", editable = "modules/inference_module" }, { name = "insights-module", editable = "modules/insights_module" }, { name = "knowledge-base-module", editable = "modules/knowledge_base_module" }, @@ -2277,49 
+2274,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] -[[package]] -name = "image-search-module" -version = "0.1.0" -source = { editable = "modules/image_search_module" } -dependencies = [ - { name = "common-module" }, - { name = "db-repo-module" }, - { name = "flo-cloud" }, - { name = "numpy" }, - { name = "opencv-python" }, - { name = "pillow" }, - { name = "pytest" }, - { name = "pytest-asyncio" }, - { name = "scikit-learn" }, -] - -[package.dev-dependencies] -dev = [ - { name = "pytest" }, - { name = "pytest-asyncio" }, - { name = "pytest-mock" }, -] - -[package.metadata] -requires-dist = [ - { name = "common-module", editable = "modules/common_module" }, - { name = "db-repo-module", editable = "modules/db_repo_module" }, - { name = "flo-cloud", editable = "packages/flo_cloud" }, - { name = "numpy", specifier = ">=1.24.0" }, - { name = "opencv-python", specifier = ">=4.8.0" }, - { name = "pillow", specifier = ">=10.0.0" }, - { name = "pytest", specifier = ">=8.4.1" }, - { name = "pytest-asyncio", specifier = ">=0.26.0" }, - { name = "scikit-learn", specifier = ">=1.3.0" }, -] - -[package.metadata.requires-dev] -dev = [ - { name = "pytest", specifier = ">=8.3.3,<9.0.0" }, - { name = "pytest-asyncio", specifier = ">=0.24.0,<1.0.0" }, - { name = "pytest-mock", specifier = ">=3.12.0" }, -] - [[package]] name = "imapclient" version = "2.1.0" @@ -4608,18 +4562,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/20/7f/338843f449ace853647ace35870874f69a764d251872ed1b4de9f234822c/pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0", size = 19694, upload-time = "2025-03-25T06:22:27.807Z" }, ] -[[package]] -name = "pytest-mock" -version = "3.15.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/68/14/eb014d26be205d38ad5ad20d9a80f7d201472e08167f0bb4361e251084a9/pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f", size = 34036, upload-time = "2025-09-16T16:37:27.081Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095, upload-time = "2025-09-16T16:37:25.734Z" }, -] - [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -5102,45 +5044,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/c3/c0be1135726618dc1e28d181b8c442403d8dbb9e273fd791de2d4384bcdd/safetensors-0.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:c7b214870df923cbc1593c3faee16bec59ea462758699bd3fee399d00aac072c", size = 320192, upload-time = "2025-08-08T13:13:59.467Z" }, ] -[[package]] -name = "scikit-learn" -version = "1.7.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "joblib" }, - { name = "numpy" }, - { name = "scipy" }, - { name = "threadpoolctl" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/98/c2/a7855e41c9d285dfe86dc50b250978105dce513d6e459ea66a6aeb0e1e0c/scikit_learn-1.7.2.tar.gz", hash = 
"sha256:20e9e49ecd130598f1ca38a1d85090e1a600147b9c02fa6f15d69cb53d968fda", size = 7193136, upload-time = "2025-09-09T08:21:29.075Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/83/564e141eef908a5863a54da8ca342a137f45a0bfb71d1d79704c9894c9d1/scikit_learn-1.7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7509693451651cd7361d30ce4e86a1347493554f172b1c72a39300fa2aea79e", size = 9331967, upload-time = "2025-09-09T08:20:32.421Z" }, - { url = "https://files.pythonhosted.org/packages/18/d6/ba863a4171ac9d7314c4d3fc251f015704a2caeee41ced89f321c049ed83/scikit_learn-1.7.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:0486c8f827c2e7b64837c731c8feff72c0bd2b998067a8a9cbc10643c31f0fe1", size = 8648645, upload-time = "2025-09-09T08:20:34.436Z" }, - { url = "https://files.pythonhosted.org/packages/ef/0e/97dbca66347b8cf0ea8b529e6bb9367e337ba2e8be0ef5c1a545232abfde/scikit_learn-1.7.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:89877e19a80c7b11a2891a27c21c4894fb18e2c2e077815bcade10d34287b20d", size = 9715424, upload-time = "2025-09-09T08:20:36.776Z" }, - { url = "https://files.pythonhosted.org/packages/f7/32/1f3b22e3207e1d2c883a7e09abb956362e7d1bd2f14458c7de258a26ac15/scikit_learn-1.7.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8da8bf89d4d79aaec192d2bda62f9b56ae4e5b4ef93b6a56b5de4977e375c1f1", size = 9509234, upload-time = "2025-09-09T08:20:38.957Z" }, - { url = "https://files.pythonhosted.org/packages/9f/71/34ddbd21f1da67c7a768146968b4d0220ee6831e4bcbad3e03dd3eae88b6/scikit_learn-1.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:9b7ed8d58725030568523e937c43e56bc01cadb478fc43c042a9aca1dacb3ba1", size = 8894244, upload-time = "2025-09-09T08:20:41.166Z" }, - { url = "https://files.pythonhosted.org/packages/a7/aa/3996e2196075689afb9fce0410ebdb4a09099d7964d061d7213700204409/scikit_learn-1.7.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8d91a97fa2b706943822398ab943cde71858a50245e31bc71dba62aab1d60a96", size = 9259818, upload-time = "2025-09-09T08:20:43.19Z" }, - { url = "https://files.pythonhosted.org/packages/43/5d/779320063e88af9c4a7c2cf463ff11c21ac9c8bd730c4a294b0000b666c9/scikit_learn-1.7.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:acbc0f5fd2edd3432a22c69bed78e837c70cf896cd7993d71d51ba6708507476", size = 8636997, upload-time = "2025-09-09T08:20:45.468Z" }, - { url = "https://files.pythonhosted.org/packages/5c/d0/0c577d9325b05594fdd33aa970bf53fb673f051a45496842caee13cfd7fe/scikit_learn-1.7.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e5bf3d930aee75a65478df91ac1225ff89cd28e9ac7bd1196853a9229b6adb0b", size = 9478381, upload-time = "2025-09-09T08:20:47.982Z" }, - { url = "https://files.pythonhosted.org/packages/82/70/8bf44b933837ba8494ca0fc9a9ab60f1c13b062ad0197f60a56e2fc4c43e/scikit_learn-1.7.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4d6e9deed1a47aca9fe2f267ab8e8fe82ee20b4526b2c0cd9e135cea10feb44", size = 9300296, upload-time = "2025-09-09T08:20:50.366Z" }, - { url = "https://files.pythonhosted.org/packages/c6/99/ed35197a158f1fdc2fe7c3680e9c70d0128f662e1fee4ed495f4b5e13db0/scikit_learn-1.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:6088aa475f0785e01bcf8529f55280a3d7d298679f50c0bb70a2364a82d0b290", size = 8731256, upload-time = "2025-09-09T08:20:52.627Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/93/a3038cb0293037fd335f77f31fe053b89c72f17b1c8908c576c29d953e84/scikit_learn-1.7.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b7dacaa05e5d76759fb071558a8b5130f4845166d88654a0f9bdf3eb57851b7", size = 9212382, upload-time = "2025-09-09T08:20:54.731Z" }, - { url = "https://files.pythonhosted.org/packages/40/dd/9a88879b0c1104259136146e4742026b52df8540c39fec21a6383f8292c7/scikit_learn-1.7.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:abebbd61ad9e1deed54cca45caea8ad5f79e1b93173dece40bb8e0c658dbe6fe", size = 8592042, upload-time = "2025-09-09T08:20:57.313Z" }, - { url = "https://files.pythonhosted.org/packages/46/af/c5e286471b7d10871b811b72ae794ac5fe2989c0a2df07f0ec723030f5f5/scikit_learn-1.7.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:502c18e39849c0ea1a5d681af1dbcf15f6cce601aebb657aabbfe84133c1907f", size = 9434180, upload-time = "2025-09-09T08:20:59.671Z" }, - { url = "https://files.pythonhosted.org/packages/f1/fd/df59faa53312d585023b2da27e866524ffb8faf87a68516c23896c718320/scikit_learn-1.7.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a4c328a71785382fe3fe676a9ecf2c86189249beff90bf85e22bdb7efaf9ae0", size = 9283660, upload-time = "2025-09-09T08:21:01.71Z" }, - { url = "https://files.pythonhosted.org/packages/a7/c7/03000262759d7b6f38c836ff9d512f438a70d8a8ddae68ee80de72dcfb63/scikit_learn-1.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:63a9afd6f7b229aad94618c01c252ce9e6fa97918c5ca19c9a17a087d819440c", size = 8702057, upload-time = "2025-09-09T08:21:04.234Z" }, - { url = "https://files.pythonhosted.org/packages/55/87/ef5eb1f267084532c8e4aef98a28b6ffe7425acbfd64b5e2f2e066bc29b3/scikit_learn-1.7.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9acb6c5e867447b4e1390930e3944a005e2cb115922e693c08a323421a6966e8", size = 9558731, upload-time = "2025-09-09T08:21:06.381Z" }, - { url = "https://files.pythonhosted.org/packages/93/f8/6c1e3fc14b10118068d7938878a9f3f4e6d7b74a8ddb1e5bed65159ccda8/scikit_learn-1.7.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:2a41e2a0ef45063e654152ec9d8bcfc39f7afce35b08902bfe290c2498a67a6a", size = 9038852, upload-time = "2025-09-09T08:21:08.628Z" }, - { url = "https://files.pythonhosted.org/packages/83/87/066cafc896ee540c34becf95d30375fe5cbe93c3b75a0ee9aa852cd60021/scikit_learn-1.7.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98335fb98509b73385b3ab2bd0639b1f610541d3988ee675c670371d6a87aa7c", size = 9527094, upload-time = "2025-09-09T08:21:11.486Z" }, - { url = "https://files.pythonhosted.org/packages/9c/2b/4903e1ccafa1f6453b1ab78413938c8800633988c838aa0be386cbb33072/scikit_learn-1.7.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:191e5550980d45449126e23ed1d5e9e24b2c68329ee1f691a3987476e115e09c", size = 9367436, upload-time = "2025-09-09T08:21:13.602Z" }, - { url = "https://files.pythonhosted.org/packages/b5/aa/8444be3cfb10451617ff9d177b3c190288f4563e6c50ff02728be67ad094/scikit_learn-1.7.2-cp313-cp313t-win_amd64.whl", hash = "sha256:57dc4deb1d3762c75d685507fbd0bc17160144b2f2ba4ccea5dc285ab0d0e973", size = 9275749, upload-time = "2025-09-09T08:21:15.96Z" }, - { url = "https://files.pythonhosted.org/packages/d9/82/dee5acf66837852e8e68df6d8d3a6cb22d3df997b733b032f513d95205b7/scikit_learn-1.7.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fa8f63940e29c82d1e67a45d5297bdebbcb585f5a5a50c4914cc2e852ab77f33", size = 9208906, upload-time = "2025-09-09T08:21:18.557Z" }, 
- { url = "https://files.pythonhosted.org/packages/3c/30/9029e54e17b87cb7d50d51a5926429c683d5b4c1732f0507a6c3bed9bf65/scikit_learn-1.7.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:f95dc55b7902b91331fa4e5845dd5bde0580c9cd9612b1b2791b7e80c3d32615", size = 8627836, upload-time = "2025-09-09T08:21:20.695Z" }, - { url = "https://files.pythonhosted.org/packages/60/18/4a52c635c71b536879f4b971c2cedf32c35ee78f48367885ed8025d1f7ee/scikit_learn-1.7.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9656e4a53e54578ad10a434dc1f993330568cfee176dff07112b8785fb413106", size = 9426236, upload-time = "2025-09-09T08:21:22.645Z" }, - { url = "https://files.pythonhosted.org/packages/99/7e/290362f6ab582128c53445458a5befd471ed1ea37953d5bcf80604619250/scikit_learn-1.7.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96dc05a854add0e50d3f47a1ef21a10a595016da5b007c7d9cd9d0bffd1fcc61", size = 9312593, upload-time = "2025-09-09T08:21:24.65Z" }, - { url = "https://files.pythonhosted.org/packages/8e/87/24f541b6d62b1794939ae6422f8023703bbf6900378b2b34e0b4384dfefd/scikit_learn-1.7.2-cp314-cp314-win_amd64.whl", hash = "sha256:bb24510ed3f9f61476181e4db51ce801e2ba37541def12dc9333b946fc7a9cf8", size = 8820007, upload-time = "2025-09-09T08:21:26.713Z" }, -] - [[package]] name = "scipy" version = "1.16.2" @@ -5501,15 +5404,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/3e/ac16b6bf28edf78296aea7d0cb416b49ed30282ac8c711662541015ee6f3/textract-1.6.5-py3-none-any.whl", hash = "sha256:0accd78ec42864e3e3827f9ef798ced9aac4727b664303b724a198fed73fa438", size = 23140, upload-time = "2022-03-10T10:49:30.384Z" }, ] -[[package]] -name = "threadpoolctl" -version = "3.6.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b7/4d/08c89e34946fce2aec4fbb45c9016efd5f4d7f24af8e5d93296e935631d8/threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e", size = 21274, upload-time = "2025-03-13T13:49:23.031Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/32/d5/f9a850d79b0851d1d4ef6456097579a9005b31fea68726a4ae5f2d82ddd9/threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb", size = 18638, upload-time = "2025-03-13T13:49:21.846Z" }, -] - [[package]] name = "tiktoken" version = "0.9.0"